Posted to commits@nifi.apache.org by mc...@apache.org on 2015/04/28 16:04:35 UTC

[01/50] [abbrv] incubator-nifi git commit: NIFI-271

Repository: incubator-nifi
Updated Branches:
  refs/heads/NIFI-292 ead451fc5 -> 46e691dd5


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitText.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitText.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitText.java
index aeb887a..aa28cc0 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitText.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitText.java
@@ -36,16 +36,6 @@ public class TestSplitText {
     final Path dataPath = Paths.get("src/test/resources/TestSplitText");
     final Path file = dataPath.resolve(originalFilename);
 
-//    public static void main(final String[] args) throws IOException {
-//        for (int i=1; i <= 4; i++) {
-//            final Path path = Paths.get("src/test/resources/TestSplitText/" + i + ".txt");
-//            final byte[] data = Files.readAllBytes(path);
-//            final String text = new String(data, StandardCharsets.UTF_8);
-//            final String updated = text.replace("\n", "\r\n");
-//            final Path updatedPath = Paths.get("src/test/resources/TestSplitText/updated/" + i + ".txt");
-//            Files.write(updatedPath, updated.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE_NEW);
-//        }
-//    }
     @Test
     public void testRoutesToFailureIfHeaderLinesNotAllPresent() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new SplitText());
@@ -81,22 +71,17 @@ public class TestSplitText {
         runner.assertTransferCount(SplitText.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitText.REL_SPLITS, 4);
 
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitText.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitText.REL_SPLITS);
 
         final String expected0 = "Header Line #1\nHeader Line #2\nLine #1";
         final String expected1 = "Line #2\nLine #3\nLine #4";
         final String expected2 = "Line #5\nLine #6\nLine #7";
         final String expected3 = "Line #8\nLine #9\nLine #10";
 
-        splits.get(0).
-                assertContentEquals(expected0);
-        splits.get(1).
-                assertContentEquals(expected1);
-        splits.get(2).
-                assertContentEquals(expected2);
-        splits.get(3).
-                assertContentEquals(expected3);
+        splits.get(0).assertContentEquals(expected0);
+        splits.get(1).assertContentEquals(expected1);
+        splits.get(2).assertContentEquals(expected2);
+        splits.get(3).assertContentEquals(expected3);
     }
 
     @Test
@@ -112,14 +97,11 @@ public class TestSplitText {
         runner.assertTransferCount(SplitText.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitText.REL_SPLITS, 4);
 
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitText.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitText.REL_SPLITS);
         for (int i = 0; i < splits.size(); i++) {
             final MockFlowFile split = splits.get(i);
-            split.assertContentEquals(file.getParent().
-                    resolve((i + 1) + ".txt"));
-            split.assertAttributeEquals(SplitText.FRAGMENT_INDEX, String.
-                    valueOf(i + 1));
+            split.assertContentEquals(file.getParent().resolve((i + 1) + ".txt"));
+            split.assertAttributeEquals(SplitText.FRAGMENT_INDEX, String.valueOf(i + 1));
         }
     }
 
@@ -136,26 +118,16 @@ public class TestSplitText {
         runner.assertTransferCount(SplitText.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitText.REL_SPLITS, 2);
 
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitText.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals(file.getParent().
-                        resolve("5.txt"));
-        splits.get(0).
-                assertAttributeEquals(SplitText.FRAGMENT_INDEX, String.
-                        valueOf(1));
-        splits.get(1).
-                assertContentEquals(file.getParent().
-                        resolve("6.txt"));
-        splits.get(1).
-                assertAttributeEquals(SplitText.FRAGMENT_INDEX, String.
-                        valueOf(2));
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitText.REL_SPLITS);
+        splits.get(0).assertContentEquals(file.getParent().resolve("5.txt"));
+        splits.get(0).assertAttributeEquals(SplitText.FRAGMENT_INDEX, String.valueOf(1));
+        splits.get(1).assertContentEquals(file.getParent().resolve("6.txt"));
+        splits.get(1).assertAttributeEquals(SplitText.FRAGMENT_INDEX, String.valueOf(2));
     }
 
     @Test
     public void testSplitThenMerge() throws IOException {
-        final TestRunner splitRunner = TestRunners.
-                newTestRunner(new SplitText());
+        final TestRunner splitRunner = TestRunners.newTestRunner(new SplitText());
         splitRunner.setProperty(SplitText.LINE_SPLIT_COUNT, "3");
         splitRunner.setProperty(SplitText.REMOVE_TRAILING_NEWLINES, "false");
 
@@ -166,20 +138,15 @@ public class TestSplitText {
         splitRunner.assertTransferCount(SplitText.REL_ORIGINAL, 1);
         splitRunner.assertTransferCount(SplitText.REL_FAILURE, 0);
 
-        final List<MockFlowFile> splits = splitRunner.
-                getFlowFilesForRelationship(SplitText.REL_SPLITS);
+        final List<MockFlowFile> splits = splitRunner.getFlowFilesForRelationship(SplitText.REL_SPLITS);
         for (final MockFlowFile flowFile : splits) {
-            flowFile.
-                    assertAttributeEquals(SplitText.SEGMENT_ORIGINAL_FILENAME, originalFilename);
+            flowFile.assertAttributeEquals(SplitText.SEGMENT_ORIGINAL_FILENAME, originalFilename);
             flowFile.assertAttributeEquals(SplitText.FRAGMENT_COUNT, "4");
         }
 
-        final TestRunner mergeRunner = TestRunners.
-                newTestRunner(new MergeContent());
-        mergeRunner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
-        mergeRunner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        final TestRunner mergeRunner = TestRunners.newTestRunner(new MergeContent());
+        mergeRunner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
+        mergeRunner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         mergeRunner.enqueue(splits.toArray(new MockFlowFile[0]));
         mergeRunner.run();
 
@@ -187,13 +154,10 @@ public class TestSplitText {
         mergeRunner.assertTransferCount(MergeContent.REL_ORIGINAL, 4);
         mergeRunner.assertTransferCount(MergeContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> packed = mergeRunner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED);
+        final List<MockFlowFile> packed = mergeRunner.getFlowFilesForRelationship(MergeContent.REL_MERGED);
         MockFlowFile flowFile = packed.get(0);
-        flowFile.
-                assertAttributeEquals(CoreAttributes.FILENAME.key(), originalFilename);
-        assertEquals(Files.size(dataPath.resolve(originalFilename)), flowFile.
-                getSize());
+        flowFile.assertAttributeEquals(CoreAttributes.FILENAME.key(), originalFilename);
+        assertEquals(Files.size(dataPath.resolve(originalFilename)), flowFile.getSize());
         flowFile.assertContentEquals(file);
     }
 }

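For context (not part of this commit), the single-line fluent style the cleanup settles on reads end to end as in the following self-contained sketch; the class name and the four-line in-memory input are hypothetical stand-ins for the 1.txt..6.txt test resources used above:

package org.apache.nifi.processors.standard;

import java.util.List;

import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

public class SplitTextStyleSketch {

    @Test
    public void splitsFourLinesIntoTwoFragments() {
        final TestRunner runner = TestRunners.newTestRunner(new SplitText());
        runner.setProperty(SplitText.LINE_SPLIT_COUNT, "2");

        // Hypothetical in-memory input instead of the on-disk test resources.
        runner.enqueue("Line #1\nLine #2\nLine #3\nLine #4".getBytes());
        runner.run();

        runner.assertTransferCount(SplitText.REL_ORIGINAL, 1);
        runner.assertTransferCount(SplitText.REL_SPLITS, 2);
        runner.assertTransferCount(SplitText.REL_FAILURE, 0);

        // fragment.index is 1-based, as the tests above assert.
        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitText.REL_SPLITS);
        splits.get(0).assertAttributeEquals(SplitText.FRAGMENT_INDEX, "1");
        splits.get(1).assertAttributeEquals(SplitText.FRAGMENT_INDEX, "2");
    }
}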
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitXml.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitXml.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitXml.java
index 3f9e426..a84e031 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitXml.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitXml.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.SplitXml;
 import java.io.IOException;
 import java.io.StringReader;
 import java.nio.file.Paths;
@@ -97,8 +96,7 @@ public class TestSplitXml {
         for (MockFlowFile out : flowfiles) {
             final byte[] outData = out.toByteArray();
             final String outXml = new String(outData, "UTF-8");
-            saxParser.
-                    parse(new InputSource(new StringReader(outXml)), new DefaultHandler());
+            saxParser.parse(new InputSource(new StringReader(outXml)), new DefaultHandler());
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestTransformXml.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestTransformXml.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestTransformXml.java
index 620cb77..7074ec9 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestTransformXml.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestTransformXml.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.TransformXml;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
@@ -39,18 +38,15 @@ public class TestTransformXml {
 
     @Test
     public void testStylesheetNotFound() throws IOException {
-        final TestRunner controller = TestRunners.
-                newTestRunner(TransformXml.class);
-        controller.
-                setProperty(TransformXml.XSLT_FILE_NAME, "/no/path/to/math.xsl");
+        final TestRunner controller = TestRunners.newTestRunner(TransformXml.class);
+        controller.setProperty(TransformXml.XSLT_FILE_NAME, "/no/path/to/math.xsl");
         controller.assertNotValid();
     }
 
     @Test
     public void testNonXmlContent() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new TransformXml());
-        runner.
-                setProperty(TransformXml.XSLT_FILE_NAME, "src/test/resources/TestTransformXml/math.xsl");
+        runner.setProperty(TransformXml.XSLT_FILE_NAME, "src/test/resources/TestTransformXml/math.xsl");
 
         final Map<String, String> attributes = new HashMap<>();
         runner.enqueue("not xml".getBytes(), attributes);
@@ -58,11 +54,8 @@ public class TestTransformXml {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(TransformXml.REL_FAILURE);
-        final MockFlowFile original = runner.
-                getFlowFilesForRelationship(TransformXml.REL_FAILURE).
-                get(0);
+        final MockFlowFile original = runner.getFlowFilesForRelationship(TransformXml.REL_FAILURE).get(0);
         final String originalContent = new String(original.toByteArray(), StandardCharsets.UTF_8);
-        System.out.println("originalContent:\n" + originalContent);
 
         original.assertContentEquals("not xml");
     }
@@ -72,32 +65,24 @@ public class TestTransformXml {
     public void testTransformMath() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new TransformXml());
         runner.setProperty("header", "Test for mod");
-        runner.
-                setProperty(TransformXml.XSLT_FILE_NAME, "src/test/resources/TestTransformXml/math.xsl");
+        runner.setProperty(TransformXml.XSLT_FILE_NAME, "src/test/resources/TestTransformXml/math.xsl");
 
         final Map<String, String> attributes = new HashMap<>();
-        runner.
-                enqueue(Paths.
-                        get("src/test/resources/TestTransformXml/math.xml"), attributes);
+        runner.enqueue(Paths.get("src/test/resources/TestTransformXml/math.xml"), attributes);
         runner.run();
 
         runner.assertAllFlowFilesTransferred(TransformXml.REL_SUCCESS);
-        final MockFlowFile transformed = runner.
-                getFlowFilesForRelationship(TransformXml.REL_SUCCESS).
-                get(0);
+        final MockFlowFile transformed = runner.getFlowFilesForRelationship(TransformXml.REL_SUCCESS).get(0);
         final String transformedContent = new String(transformed.toByteArray(), StandardCharsets.UTF_8);
-        System.out.println("transformedContent:\n" + transformedContent);
 
-        transformed.assertContentEquals(Paths.
-                get("src/test/resources/TestTransformXml/math.html"));
+        transformed.assertContentEquals(Paths.get("src/test/resources/TestTransformXml/math.html"));
     }
 
     @Ignore("this test fails")
     @Test
     public void testTransformCsv() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new TransformXml());
-        runner.
-                setProperty(TransformXml.XSLT_FILE_NAME, "src/test/resources/TestTransformXml/tokens.xsl");
+        runner.setProperty(TransformXml.XSLT_FILE_NAME, "src/test/resources/TestTransformXml/tokens.xsl");
         runner.setProperty("uuid_0", "${uuid_0}");
         runner.setProperty("uuid_1", "${uuid_1}");
 
@@ -113,24 +98,18 @@ public class TestTransformXml {
 
         String line = null;
         while ((line = reader.readLine()) != null) {
-            builder.append(line).
-                    append("\n");
+            builder.append(line).append("\n");
         }
         builder.append("</data>");
         String data = builder.toString();
-        System.out.println("Original content:\n" + data);
         runner.enqueue(data.getBytes(), attributes);
         runner.run();
 
         runner.assertAllFlowFilesTransferred(TransformXml.REL_SUCCESS);
-        final MockFlowFile transformed = runner.
-                getFlowFilesForRelationship(TransformXml.REL_SUCCESS).
-                get(0);
+        final MockFlowFile transformed = runner.getFlowFilesForRelationship(TransformXml.REL_SUCCESS).get(0);
         final String transformedContent = new String(transformed.toByteArray(), StandardCharsets.ISO_8859_1);
-        System.out.println("transformedContent:\n" + transformedContent);
 
-        transformed.assertContentEquals(Paths.
-                get("src/test/resources/TestTransformXml/tokens.xml"));
+        transformed.assertContentEquals(Paths.get("src/test/resources/TestTransformXml/tokens.xml"));
     }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestUnpackContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestUnpackContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestUnpackContent.java
index 6035e08..04fe05a 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestUnpackContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestUnpackContent.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.UnpackContent;
-import org.apache.nifi.processors.standard.MergeContent;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
@@ -36,14 +34,12 @@ import org.junit.Test;
 
 public class TestUnpackContent {
 
-    private static final Path dataPath = Paths.
-            get("src/test/resources/TestUnpackContent");
+    private static final Path dataPath = Paths.get("src/test/resources/TestUnpackContent");
 
     @Test
     public void testTar() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new UnpackContent());
-        runner.
-                setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.TAR_FORMAT);
+        runner.setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.TAR_FORMAT);
 
         runner.enqueue(dataPath.resolve("data.tar"));
         runner.run();
@@ -52,15 +48,11 @@ public class TestUnpackContent {
         runner.assertTransferCount(UnpackContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(UnpackContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> unpacked = runner.
-                getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
+        final List<MockFlowFile> unpacked = runner.getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
         for (final MockFlowFile flowFile : unpacked) {
-            final String filename = flowFile.
-                    getAttribute(CoreAttributes.FILENAME.key());
-            final String folder = flowFile.getAttribute(CoreAttributes.PATH.
-                    key());
-            final Path path = dataPath.resolve(folder).
-                    resolve(filename);
+            final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());
+            final String folder = flowFile.getAttribute(CoreAttributes.PATH.key());
+            final Path path = dataPath.resolve(folder).resolve(filename);
             assertTrue(Files.exists(path));
 
             flowFile.assertContentEquals(path.toFile());
@@ -70,8 +62,7 @@ public class TestUnpackContent {
     @Test
     public void testZip() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new UnpackContent());
-        runner.
-                setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.ZIP_FORMAT);
+        runner.setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.ZIP_FORMAT);
         runner.enqueue(dataPath.resolve("data.zip"));
 
         runner.run();
@@ -80,15 +71,11 @@ public class TestUnpackContent {
         runner.assertTransferCount(UnpackContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(UnpackContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> unpacked = runner.
-                getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
+        final List<MockFlowFile> unpacked = runner.getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
         for (final MockFlowFile flowFile : unpacked) {
-            final String filename = flowFile.
-                    getAttribute(CoreAttributes.FILENAME.key());
-            final String folder = flowFile.getAttribute(CoreAttributes.PATH.
-                    key());
-            final Path path = dataPath.resolve(folder).
-                    resolve(filename);
+            final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());
+            final String folder = flowFile.getAttribute(CoreAttributes.PATH.key());
+            final Path path = dataPath.resolve(folder).resolve(filename);
             assertTrue(Files.exists(path));
 
             flowFile.assertContentEquals(path.toFile());
@@ -98,8 +85,7 @@ public class TestUnpackContent {
     @Test
     public void testFlowFileStreamV3() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new UnpackContent());
-        runner.
-                setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.FLOWFILE_STREAM_FORMAT_V3);
+        runner.setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.FLOWFILE_STREAM_FORMAT_V3);
         runner.enqueue(dataPath.resolve("data.flowfilev3"));
 
         runner.run();
@@ -108,15 +94,11 @@ public class TestUnpackContent {
         runner.assertTransferCount(UnpackContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(UnpackContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> unpacked = runner.
-                getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
+        final List<MockFlowFile> unpacked = runner.getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
         for (final MockFlowFile flowFile : unpacked) {
-            final String filename = flowFile.
-                    getAttribute(CoreAttributes.FILENAME.key());
-            final String folder = flowFile.getAttribute(CoreAttributes.PATH.
-                    key());
-            final Path path = dataPath.resolve(folder).
-                    resolve(filename);
+            final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());
+            final String folder = flowFile.getAttribute(CoreAttributes.PATH.key());
+            final Path path = dataPath.resolve(folder).resolve(filename);
             assertTrue(Files.exists(path));
 
             flowFile.assertContentEquals(path.toFile());
@@ -126,8 +108,7 @@ public class TestUnpackContent {
     @Test
     public void testFlowFileStreamV2() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new UnpackContent());
-        runner.
-                setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.FLOWFILE_STREAM_FORMAT_V2);
+        runner.setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.FLOWFILE_STREAM_FORMAT_V2);
         runner.enqueue(dataPath.resolve("data.flowfilev2"));
 
         runner.run();
@@ -136,15 +117,11 @@ public class TestUnpackContent {
         runner.assertTransferCount(UnpackContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(UnpackContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> unpacked = runner.
-                getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
+        final List<MockFlowFile> unpacked = runner.getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
         for (final MockFlowFile flowFile : unpacked) {
-            final String filename = flowFile.
-                    getAttribute(CoreAttributes.FILENAME.key());
-            final String folder = flowFile.getAttribute(CoreAttributes.PATH.
-                    key());
-            final Path path = dataPath.resolve(folder).
-                    resolve(filename);
+            final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());
+            final String folder = flowFile.getAttribute(CoreAttributes.PATH.key());
+            final Path path = dataPath.resolve(folder).resolve(filename);
             assertTrue(Files.exists(path));
 
             flowFile.assertContentEquals(path.toFile());
@@ -153,10 +130,8 @@ public class TestUnpackContent {
 
     @Test
     public void testTarThenMerge() throws IOException {
-        final TestRunner unpackRunner = TestRunners.
-                newTestRunner(new UnpackContent());
-        unpackRunner.
-                setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.TAR_FORMAT);
+        final TestRunner unpackRunner = TestRunners.newTestRunner(new UnpackContent());
+        unpackRunner.setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.TAR_FORMAT);
 
         unpackRunner.enqueue(dataPath.resolve("data.tar"));
         unpackRunner.run();
@@ -165,19 +140,14 @@ public class TestUnpackContent {
         unpackRunner.assertTransferCount(UnpackContent.REL_ORIGINAL, 1);
         unpackRunner.assertTransferCount(UnpackContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> unpacked = unpackRunner.
-                getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
+        final List<MockFlowFile> unpacked = unpackRunner.getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
         for (final MockFlowFile flowFile : unpacked) {
-            assertEquals(flowFile.
-                    getAttribute(UnpackContent.SEGMENT_ORIGINAL_FILENAME), "data");
+            assertEquals(flowFile.getAttribute(UnpackContent.SEGMENT_ORIGINAL_FILENAME), "data");
         }
 
-        final TestRunner mergeRunner = TestRunners.
-                newTestRunner(new MergeContent());
-        mergeRunner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_TAR);
-        mergeRunner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        final TestRunner mergeRunner = TestRunners.newTestRunner(new MergeContent());
+        mergeRunner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_TAR);
+        mergeRunner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         mergeRunner.setProperty(MergeContent.KEEP_PATH, "true");
         mergeRunner.enqueue(unpacked.toArray(new MockFlowFile[0]));
         mergeRunner.run();
@@ -186,20 +156,16 @@ public class TestUnpackContent {
         mergeRunner.assertTransferCount(MergeContent.REL_ORIGINAL, 2);
         mergeRunner.assertTransferCount(MergeContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> packed = mergeRunner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED);
+        final List<MockFlowFile> packed = mergeRunner.getFlowFilesForRelationship(MergeContent.REL_MERGED);
         for (final MockFlowFile flowFile : packed) {
-            flowFile.
-                    assertAttributeEquals(CoreAttributes.FILENAME.key(), "data.tar");
+            flowFile.assertAttributeEquals(CoreAttributes.FILENAME.key(), "data.tar");
         }
     }
 
     @Test
     public void testZipThenMerge() throws IOException {
-        final TestRunner unpackRunner = TestRunners.
-                newTestRunner(new UnpackContent());
-        unpackRunner.
-                setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.ZIP_FORMAT);
+        final TestRunner unpackRunner = TestRunners.newTestRunner(new UnpackContent());
+        unpackRunner.setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.ZIP_FORMAT);
 
         unpackRunner.enqueue(dataPath.resolve("data.zip"));
         unpackRunner.run();
@@ -208,19 +174,14 @@ public class TestUnpackContent {
         unpackRunner.assertTransferCount(UnpackContent.REL_ORIGINAL, 1);
         unpackRunner.assertTransferCount(UnpackContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> unpacked = unpackRunner.
-                getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
+        final List<MockFlowFile> unpacked = unpackRunner.getFlowFilesForRelationship(UnpackContent.REL_SUCCESS);
         for (final MockFlowFile flowFile : unpacked) {
-            assertEquals(flowFile.
-                    getAttribute(UnpackContent.SEGMENT_ORIGINAL_FILENAME), "data");
+            assertEquals(flowFile.getAttribute(UnpackContent.SEGMENT_ORIGINAL_FILENAME), "data");
         }
 
-        final TestRunner mergeRunner = TestRunners.
-                newTestRunner(new MergeContent());
-        mergeRunner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_ZIP);
-        mergeRunner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        final TestRunner mergeRunner = TestRunners.newTestRunner(new MergeContent());
+        mergeRunner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_ZIP);
+        mergeRunner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         mergeRunner.setProperty(MergeContent.KEEP_PATH, "true");
         mergeRunner.enqueue(unpacked.toArray(new MockFlowFile[0]));
         mergeRunner.run();
@@ -229,20 +190,16 @@ public class TestUnpackContent {
         mergeRunner.assertTransferCount(MergeContent.REL_ORIGINAL, 2);
         mergeRunner.assertTransferCount(MergeContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> packed = mergeRunner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED);
+        final List<MockFlowFile> packed = mergeRunner.getFlowFilesForRelationship(MergeContent.REL_MERGED);
         for (final MockFlowFile flowFile : packed) {
-            flowFile.
-                    assertAttributeEquals(CoreAttributes.FILENAME.key(), "data.zip");
+            flowFile.assertAttributeEquals(CoreAttributes.FILENAME.key(), "data.zip");
         }
     }
 
     @Test
     public void testZipHandlesBadData() throws IOException {
-        final TestRunner unpackRunner = TestRunners.
-                newTestRunner(new UnpackContent());
-        unpackRunner.
-                setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.ZIP_FORMAT);
+        final TestRunner unpackRunner = TestRunners.newTestRunner(new UnpackContent());
+        unpackRunner.setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.ZIP_FORMAT);
 
         unpackRunner.enqueue(dataPath.resolve("data.tar"));
         unpackRunner.run();
@@ -254,10 +211,8 @@ public class TestUnpackContent {
 
     @Test
     public void testTarHandlesBadData() throws IOException {
-        final TestRunner unpackRunner = TestRunners.
-                newTestRunner(new UnpackContent());
-        unpackRunner.
-                setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.TAR_FORMAT);
+        final TestRunner unpackRunner = TestRunners.newTestRunner(new UnpackContent());
+        unpackRunner.setProperty(UnpackContent.PACKAGING_FORMAT, UnpackContent.TAR_FORMAT);
 
         unpackRunner.enqueue(dataPath.resolve("data.zip"));
         unpackRunner.run();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateXml.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateXml.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateXml.java
index d550183..7dfe5b6 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateXml.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateXml.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.ValidateXml;
 import java.io.IOException;
 import java.nio.file.Paths;
 
@@ -31,8 +30,7 @@ public class TestValidateXml {
     @Test
     public void testValid() throws IOException, SAXException {
         final TestRunner runner = TestRunners.newTestRunner(new ValidateXml());
-        runner.
-                setProperty(ValidateXml.SCHEMA_FILE, "src/test/resources/TestXml/XmlBundle.xsd");
+        runner.setProperty(ValidateXml.SCHEMA_FILE, "src/test/resources/TestXml/XmlBundle.xsd");
 
         runner.enqueue(Paths.get("src/test/resources/TestXml/xml-snippet.xml"));
         runner.run();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/UserAgentTestingServlet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/UserAgentTestingServlet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/UserAgentTestingServlet.java
index 347c338..e7fee65 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/UserAgentTestingServlet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/UserAgentTestingServlet.java
@@ -37,6 +37,5 @@ public class UserAgentTestingServlet extends HttpServlet {
         } else {
             response.setStatus(500);
         }
-        return;
     }
 }


[10/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
index 53ed961..46629fe 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
@@ -62,35 +62,35 @@ public class ScanAttribute extends AbstractProcessor {
     public static final String MATCH_CRITERIA_ALL = "All Must Match";
     public static final String MATCH_CRITERIA_ANY = "At Least 1 Must Match";
 
-    public static final PropertyDescriptor MATCHING_CRITERIA = new PropertyDescriptor.Builder().
-            name("Match Criteria").
-            description("If set to All Must Match, then FlowFiles will be routed to 'matched' only if all specified "
+    public static final PropertyDescriptor MATCHING_CRITERIA = new PropertyDescriptor.Builder()
+            .name("Match Criteria")
+            .description("If set to All Must Match, then FlowFiles will be routed to 'matched' only if all specified "
                     + "attributes' values are found in the dictionary. If set to At Least 1 Must Match, FlowFiles will "
-                    + "be routed to 'matched' if any attribute specified is found in the dictionary").
-            required(true).
-            allowableValues(MATCH_CRITERIA_ANY, MATCH_CRITERIA_ALL).
-            defaultValue(MATCH_CRITERIA_ANY).
-            build();
-    public static final PropertyDescriptor ATTRIBUTE_PATTERN = new PropertyDescriptor.Builder().
-            name("Attribute Pattern").
-            description("Regular Expression that specifies the names of attributes whose values will be matched against the terms in the dictionary").
-            required(true).
-            addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR).
-            defaultValue(".*").
-            build();
-    public static final PropertyDescriptor DICTIONARY_FILE = new PropertyDescriptor.Builder().
-            name("Dictionary File").
-            description("A new-line-delimited text file that includes the terms that should trigger a match. Empty lines are ignored.").
-            required(true).
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            build();
-    public static final PropertyDescriptor DICTIONARY_FILTER = new PropertyDescriptor.Builder().
-            name("Dictionary Filter Pattern").
-            description("A Regular Expression that will be applied to each line in the dictionary file. If the regular expression does not match the line, the line will not be included in the list of terms to search for. If a Matching Group is specified, only the portion of the term that matches that Matching Group will be used instead of the entire term. If not specified, all terms in the dictionary will be used and each term will consist of the text of the entire line in the file").
-            required(false).
-            addValidator(StandardValidators.createRegexValidator(0, 1, false)).
-            defaultValue(null).
-            build();
+                    + "be routed to 'matched' if any attribute specified is found in the dictionary")
+            .required(true)
+            .allowableValues(MATCH_CRITERIA_ANY, MATCH_CRITERIA_ALL)
+            .defaultValue(MATCH_CRITERIA_ANY)
+            .build();
+    public static final PropertyDescriptor ATTRIBUTE_PATTERN = new PropertyDescriptor.Builder()
+            .name("Attribute Pattern")
+            .description("Regular Expression that specifies the names of attributes whose values will be matched against the terms in the dictionary")
+            .required(true)
+            .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR)
+            .defaultValue(".*")
+            .build();
+    public static final PropertyDescriptor DICTIONARY_FILE = new PropertyDescriptor.Builder()
+            .name("Dictionary File")
+            .description("A new-line-delimited text file that includes the terms that should trigger a match. Empty lines are ignored.")
+            .required(true)
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor DICTIONARY_FILTER = new PropertyDescriptor.Builder()
+            .name("Dictionary Filter Pattern")
+            .description("A Regular Expression that will be applied to each line in the dictionary file. If the regular expression does not match the line, the line will not be included in the list of terms to search for. If a Matching Group is specified, only the portion of the term that matches that Matching Group will be used instead of the entire term. If not specified, all terms in the dictionary will be used and each term will consist of the text of the entire line in the file")
+            .required(false)
+            .addValidator(StandardValidators.createRegexValidator(0, 1, false))
+            .defaultValue(null)
+            .build();
 
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
@@ -100,14 +100,14 @@ public class ScanAttribute extends AbstractProcessor {
     private volatile Set<String> dictionaryTerms = null;
     private volatile SynchronousFileWatcher fileWatcher = null;
 
-    public static final Relationship REL_MATCHED = new Relationship.Builder().
-            name("matched").
-            description("FlowFiles whose attributes are found in the dictionary will be routed to this relationship").
-            build();
-    public static final Relationship REL_UNMATCHED = new Relationship.Builder().
-            name("unmatched").
-            description("FlowFiles whose attributes are not found in the dictionary will be routed to this relationship").
-            build();
+    public static final Relationship REL_MATCHED = new Relationship.Builder()
+            .name("matched")
+            .description("FlowFiles whose attributes are found in the dictionary will be routed to this relationship")
+            .build();
+    public static final Relationship REL_UNMATCHED = new Relationship.Builder()
+            .name("unmatched")
+            .description("FlowFiles whose attributes are not found in the dictionary will be routed to this relationship")
+            .build();
 
     @Override
     protected void init(final ProcessorInitializationContext context) {
@@ -136,41 +136,32 @@ public class ScanAttribute extends AbstractProcessor {
 
     @OnScheduled
     public void onScheduled(final ProcessContext context) throws IOException {
-        final String filterRegex = context.getProperty(DICTIONARY_FILTER).
-                getValue();
-        this.dictionaryFilterPattern = (filterRegex == null) ? null : Pattern.
-                compile(filterRegex);
+        final String filterRegex = context.getProperty(DICTIONARY_FILTER).getValue();
+        this.dictionaryFilterPattern = (filterRegex == null) ? null : Pattern.compile(filterRegex);
 
-        final String attributeRegex = context.getProperty(ATTRIBUTE_PATTERN).
-                getValue();
-        this.attributePattern = (attributeRegex.equals(".*")) ? null : Pattern.
-                compile(attributeRegex);
+        final String attributeRegex = context.getProperty(ATTRIBUTE_PATTERN).getValue();
+        this.attributePattern = (attributeRegex.equals(".*")) ? null : Pattern.compile(attributeRegex);
 
         this.dictionaryTerms = createDictionary(context);
-        this.fileWatcher = new SynchronousFileWatcher(Paths.get(context.
-                getProperty(DICTIONARY_FILE).
-                getValue()), new LastModifiedMonitor(), 1000L);
+        this.fileWatcher = new SynchronousFileWatcher(Paths.get(context.getProperty(DICTIONARY_FILE).getValue()), new LastModifiedMonitor(), 1000L);
     }
 
     private Set<String> createDictionary(final ProcessContext context) throws IOException {
         final Set<String> terms = new HashSet<>();
 
-        final File file = new File(context.getProperty(DICTIONARY_FILE).
-                getValue());
+        final File file = new File(context.getProperty(DICTIONARY_FILE).getValue());
         try (final InputStream fis = new FileInputStream(file);
                 final BufferedReader reader = new BufferedReader(new InputStreamReader(fis))) {
 
             String line;
             while ((line = reader.readLine()) != null) {
-                if (line.trim().
-                        isEmpty()) {
+                if (line.trim().isEmpty()) {
                     continue;
                 }
 
                 String matchingTerm = line;
                 if (dictionaryFilterPattern != null) {
-                    final Matcher matcher = dictionaryFilterPattern.
-                            matcher(line);
+                    final Matcher matcher = dictionaryFilterPattern.matcher(line);
                     if (!matcher.matches()) {
                         continue;
                     }
@@ -207,27 +198,20 @@ public class ScanAttribute extends AbstractProcessor {
             logger.error("Unable to reload dictionary due to {}", e);
         }
 
-        final boolean matchAll = context.getProperty(MATCHING_CRITERIA).
-                getValue().
-                equals(MATCH_CRITERIA_ALL);
+        final boolean matchAll = context.getProperty(MATCHING_CRITERIA).getValue().equals(MATCH_CRITERIA_ALL);
 
         for (final FlowFile flowFile : flowFiles) {
             final boolean matched = matchAll ? allMatch(flowFile, attributePattern, dictionaryTerms) : anyMatch(flowFile, attributePattern, dictionaryTerms);
             final Relationship relationship = matched ? REL_MATCHED : REL_UNMATCHED;
-            session.getProvenanceReporter().
-                    route(flowFile, relationship);
+            session.getProvenanceReporter().route(flowFile, relationship);
             session.transfer(flowFile, relationship);
-            logger.
-                    info("Transferred {} to {}", new Object[]{flowFile, relationship});
+            logger.info("Transferred {} to {}", new Object[]{flowFile, relationship});
         }
     }
 
     private boolean allMatch(final FlowFile flowFile, final Pattern attributePattern, final Set<String> dictionary) {
-        for (final Map.Entry<String, String> entry : flowFile.getAttributes().
-                entrySet()) {
-            if (attributePattern == null || attributePattern.matcher(entry.
-                    getKey()).
-                    matches()) {
+        for (final Map.Entry<String, String> entry : flowFile.getAttributes().entrySet()) {
+            if (attributePattern == null || attributePattern.matcher(entry.getKey()).matches()) {
                 if (!dictionary.contains(entry.getValue())) {
                     return false;
                 }
@@ -238,11 +222,8 @@ public class ScanAttribute extends AbstractProcessor {
     }
 
     private boolean anyMatch(final FlowFile flowFile, final Pattern attributePattern, final Set<String> dictionary) {
-        for (final Map.Entry<String, String> entry : flowFile.getAttributes().
-                entrySet()) {
-            if (attributePattern == null || attributePattern.matcher(entry.
-                    getKey()).
-                    matches()) {
+        for (final Map.Entry<String, String> entry : flowFile.getAttributes().entrySet()) {
+            if (attributePattern == null || attributePattern.matcher(entry.getKey()).matches()) {
                 if (dictionary.contains(entry.getValue())) {
                     return true;
                 }

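For context (not part of this commit), the properties and relationships defined above can be exercised with the same TestRunner pattern used in the test changes in this series; the temp-file dictionary, the class name, and the "category" attribute below are hypothetical:

package org.apache.nifi.processors.standard;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;

import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

public class ScanAttributeUsageSketch {

    @Test
    public void routesToMatchedWhenAnyAttributeValueIsInDictionary() throws IOException {
        // Hypothetical new-line-delimited dictionary, one term per line.
        final Path dictionary = Files.createTempFile("terms", ".txt");
        Files.write(dictionary, "news\nsports\n".getBytes(StandardCharsets.UTF_8));

        final TestRunner runner = TestRunners.newTestRunner(new ScanAttribute());
        runner.setProperty(ScanAttribute.DICTIONARY_FILE, dictionary.toString());
        runner.setProperty(ScanAttribute.MATCHING_CRITERIA, ScanAttribute.MATCH_CRITERIA_ANY);
        runner.setProperty(ScanAttribute.ATTRIBUTE_PATTERN, ".*");

        final Map<String, String> attributes = new HashMap<>();
        attributes.put("category", "news"); // "news" is a term in the dictionary above

        runner.enqueue(new byte[0], attributes);
        runner.run();

        // At Least 1 Must Match: a single matching attribute value routes to 'matched'.
        runner.assertAllFlowFilesTransferred(ScanAttribute.REL_MATCHED);
        runner.assertTransferCount(ScanAttribute.REL_MATCHED, 1);
    }
}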
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
index 28d48ad..ab5e8b5 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
@@ -74,31 +74,31 @@ public class ScanContent extends AbstractProcessor {
     public static final String BINARY_ENCODING = "binary";
     public static final String MATCH_ATTRIBUTE_KEY = "matching.term";
 
-    public static final PropertyDescriptor DICTIONARY = new PropertyDescriptor.Builder().
-            name("Dictionary File").
-            description("The filename of the terms dictionary").
-            required(true).
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            build();
-    public static final PropertyDescriptor DICTIONARY_ENCODING = new PropertyDescriptor.Builder().
-            name("Dictionary Encoding").
-            description("Indicates how the dictionary is encoded. If 'text', dictionary terms are new-line delimited and UTF-8 encoded; "
-                    + "if 'binary', dictionary terms are denoted by a 4-byte integer indicating the term length followed by the term itself").
-            required(true).
-            allowableValues(TEXT_ENCODING, BINARY_ENCODING).
-            defaultValue(TEXT_ENCODING).
-            build();
-
-    public static final Relationship REL_MATCH = new Relationship.Builder().
-            name("matched").
-            description("FlowFiles that match at least one "
-                    + "term in the dictionary are routed to this relationship").
-            build();
-    public static final Relationship REL_NO_MATCH = new Relationship.Builder().
-            name("unmatched").
-            description("FlowFiles that do not match any "
-                    + "term in the dictionary are routed to this relationship").
-            build();
+    public static final PropertyDescriptor DICTIONARY = new PropertyDescriptor.Builder()
+            .name("Dictionary File")
+            .description("The filename of the terms dictionary")
+            .required(true)
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor DICTIONARY_ENCODING = new PropertyDescriptor.Builder()
+            .name("Dictionary Encoding")
+            .description("Indicates how the dictionary is encoded. If 'text', dictionary terms are new-line delimited and UTF-8 encoded; "
+                    + "if 'binary', dictionary terms are denoted by a 4-byte integer indicating the term length followed by the term itself")
+            .required(true)
+            .allowableValues(TEXT_ENCODING, BINARY_ENCODING)
+            .defaultValue(TEXT_ENCODING)
+            .build();
+
+    public static final Relationship REL_MATCH = new Relationship.Builder()
+            .name("matched")
+            .description("FlowFiles that match at least one "
+                    + "term in the dictionary are routed to this relationship")
+            .build();
+    public static final Relationship REL_NO_MATCH = new Relationship.Builder()
+            .name("unmatched")
+            .description("FlowFiles that do not match any "
+                    + "term in the dictionary are routed to this relationship")
+            .build();
 
     public static final Charset UTF8 = Charset.forName("UTF-8");
 
@@ -135,8 +135,7 @@ public class ScanContent extends AbstractProcessor {
     @Override
     public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
         if (descriptor.equals(DICTIONARY)) {
-            fileWatcherRef.
-                    set(new SynchronousFileWatcher(Paths.get(newValue), new LastModifiedMonitor(), 60000L));
+            fileWatcherRef.set(new SynchronousFileWatcher(Paths.get(newValue), new LastModifiedMonitor(), 60000L));
         }
     }
 
@@ -154,14 +153,10 @@ public class ScanContent extends AbstractProcessor {
                 final Search<byte[]> search = new AhoCorasick<>();
                 final Set<SearchTerm<byte[]>> terms = new HashSet<>();
 
-                final InputStream inStream = Files.newInputStream(Paths.
-                        get(context.getProperty(DICTIONARY).
-                                getValue()), StandardOpenOption.READ);
+                final InputStream inStream = Files.newInputStream(Paths.get(context.getProperty(DICTIONARY).getValue()), StandardOpenOption.READ);
 
                 final TermLoader termLoader;
-                if (context.getProperty(DICTIONARY_ENCODING).
-                        getValue().
-                        equalsIgnoreCase(TEXT_ENCODING)) {
+                if (context.getProperty(DICTIONARY_ENCODING).getValue().equalsIgnoreCase(TEXT_ENCODING)) {
                     termLoader = new TextualTermLoader(inStream);
                 } else {
                     termLoader = new BinaryTermLoader(inStream);
@@ -175,10 +170,7 @@ public class ScanContent extends AbstractProcessor {
 
                     search.initializeDictionary(terms);
                     searchRef.set(search);
-                    logger.
-                            info("Loaded search dictionary from {}", new Object[]{context.
-                                getProperty(DICTIONARY).
-                                getValue()});
+                    logger.info("Loaded search dictionary from {}", new Object[]{context.getProperty(DICTIONARY).getValue()});
                     return true;
                 } finally {
                     termLoader.close();
@@ -231,13 +223,9 @@ public class ScanContent extends AbstractProcessor {
             @Override
             public void process(final InputStream rawIn) throws IOException {
                 try (final InputStream in = new BufferedInputStream(rawIn)) {
-                    final SearchState<byte[]> searchResult = finalSearch.
-                            search(in, false);
+                    final SearchState<byte[]> searchResult = finalSearch.search(in, false);
                     if (searchResult.foundMatch()) {
-                        termRef.set(searchResult.getResults().
-                                keySet().
-                                iterator().
-                                next());
+                        termRef.set(searchResult.getResults().keySet().iterator().next());
                     }
                 }
             }
@@ -246,17 +234,13 @@ public class ScanContent extends AbstractProcessor {
         final SearchTerm<byte[]> matchingTerm = termRef.get();
         if (matchingTerm == null) {
             logger.info("Routing {} to 'unmatched'", new Object[]{flowFile});
-            session.getProvenanceReporter().
-                    route(flowFile, REL_NO_MATCH);
+            session.getProvenanceReporter().route(flowFile, REL_NO_MATCH);
             session.transfer(flowFile, REL_NO_MATCH);
         } else {
             final String matchingTermString = matchingTerm.toString(UTF8);
-            logger.
-                    info("Routing {} to 'matched' because it matched term {}", new Object[]{flowFile, matchingTermString});
-            flowFile = session.
-                    putAttribute(flowFile, MATCH_ATTRIBUTE_KEY, matchingTermString);
-            session.getProvenanceReporter().
-                    route(flowFile, REL_MATCH);
+            logger.info("Routing {} to 'matched' because it matched term {}", new Object[]{flowFile, matchingTermString});
+            flowFile = session.putAttribute(flowFile, MATCH_ATTRIBUTE_KEY, matchingTermString);
+            session.getProvenanceReporter().route(flowFile, REL_MATCH);
             session.transfer(flowFile, REL_MATCH);
         }
     }

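The "Dictionary Encoding" description above implies a simple length-prefixed binary layout for the 'binary' mode. A minimal sketch of producing such a dictionary (not part of this commit, and assuming the 4-byte term length is a big-endian int as written by DataOutputStream; the file name and terms are hypothetical):

import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class BinaryDictionaryWriterSketch {

    public static void main(final String[] args) throws IOException {
        final String[] terms = {"needle", "pattern"}; // hypothetical dictionary terms
        try (final DataOutputStream out = new DataOutputStream(new FileOutputStream("dictionary.bin"))) {
            for (final String term : terms) {
                final byte[] bytes = term.getBytes(StandardCharsets.UTF_8);
                out.writeInt(bytes.length); // 4-byte integer indicating the term length
                out.write(bytes);           // followed by the term itself
            }
        }
    }
}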
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
index 071f6fb..e5e90ea 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
@@ -83,22 +83,22 @@ public class SegmentContent extends AbstractProcessor {
     public static final String FRAGMENT_INDEX = "fragment.index";
     public static final String FRAGMENT_COUNT = "fragment.count";
 
-    public static final PropertyDescriptor SIZE = new PropertyDescriptor.Builder().
-            name("Segment Size").
-            description("The maximum data size for each segment").
-            addValidator(StandardValidators.DATA_SIZE_VALIDATOR).
-            required(true).
-            build();
-
-    public static final Relationship REL_SEGMENTS = new Relationship.Builder().
-            name("segments").
-            description("All segments will be sent to this relationship. If the file was small enough that it was not segmented, "
-                    + "a copy of the original is sent to this relationship as well as original").
-            build();
-    public static final Relationship REL_ORIGINAL = new Relationship.Builder().
-            name("original").
-            description("The original FlowFile will be sent to this relationship").
-            build();
+    public static final PropertyDescriptor SIZE = new PropertyDescriptor.Builder()
+            .name("Segment Size")
+            .description("The maximum data size for each segment")
+            .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
+            .required(true)
+            .build();
+
+    public static final Relationship REL_SEGMENTS = new Relationship.Builder()
+            .name("segments")
+            .description("All segments will be sent to this relationship. If the file was small enough that it was not segmented, "
+                    + "a copy of the original is sent to this relationship as well as original")
+            .build();
+    public static final Relationship REL_ORIGINAL = new Relationship.Builder()
+            .name("original")
+            .description("The original FlowFile will be sent to this relationship")
+            .build();
 
     private Set<Relationship> relationships;
     private List<PropertyDescriptor> propertyDescriptors;
@@ -132,21 +132,16 @@ public class SegmentContent extends AbstractProcessor {
             return;
         }
 
-        final String segmentId = UUID.randomUUID().
-                toString();
-        final long segmentSize = context.getProperty(SIZE).
-                asDataSize(DataUnit.B).
-                longValue();
+        final String segmentId = UUID.randomUUID().toString();
+        final long segmentSize = context.getProperty(SIZE).asDataSize(DataUnit.B).longValue();
 
-        final String originalFileName = flowFile.
-                getAttribute(CoreAttributes.FILENAME.key());
+        final String originalFileName = flowFile.getAttribute(CoreAttributes.FILENAME.key());
 
         if (flowFile.getSize() <= segmentSize) {
             flowFile = session.putAttribute(flowFile, SEGMENT_ID, segmentId);
             flowFile = session.putAttribute(flowFile, SEGMENT_INDEX, "1");
             flowFile = session.putAttribute(flowFile, SEGMENT_COUNT, "1");
-            flowFile = session.
-                    putAttribute(flowFile, SEGMENT_ORIGINAL_FILENAME, originalFileName);
+            flowFile = session.putAttribute(flowFile, SEGMENT_ORIGINAL_FILENAME, originalFileName);
 
             flowFile = session.putAttribute(flowFile, FRAGMENT_ID, segmentId);
             flowFile = session.putAttribute(flowFile, FRAGMENT_INDEX, "1");
@@ -174,8 +169,7 @@ public class SegmentContent extends AbstractProcessor {
         final Set<FlowFile> segmentSet = new HashSet<>();
         for (int i = 1; i <= totalSegments; i++) {
             final long segmentOffset = segmentSize * (i - 1);
-            FlowFile segment = session.clone(flowFile, segmentOffset, Math.
-                    min(segmentSize, flowFile.getSize() - segmentOffset));
+            FlowFile segment = session.clone(flowFile, segmentOffset, Math.min(segmentSize, flowFile.getSize() - segmentOffset));
             segmentAttributes.put(SEGMENT_INDEX, String.valueOf(i));
             segmentAttributes.put(FRAGMENT_INDEX, String.valueOf(i));
             segment = session.putAllAttributes(segment, segmentAttributes);
@@ -186,11 +180,9 @@ public class SegmentContent extends AbstractProcessor {
         session.transfer(flowFile, REL_ORIGINAL);
 
         if (totalSegments <= 10) {
-            getLogger().
-                    info("Segmented {} into {} segments: {}", new Object[]{flowFile, totalSegments, segmentSet});
+            getLogger().info("Segmented {} into {} segments: {}", new Object[]{flowFile, totalSegments, segmentSet});
         } else {
-            getLogger().
-                    info("Segmented {} into {} segments", new Object[]{flowFile, totalSegments});
+            getLogger().info("Segmented {} into {} segments", new Object[]{flowFile, totalSegments});
         }
     }
 }

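For reference, the segment boundary arithmetic in the SegmentContent loop above can be checked with a small standalone sketch. This is plain Java, not NiFi code; it assumes the total segment count is the ceiling of the FlowFile size divided by the configured segment size (the computation of totalSegments itself is outside the hunk shown):

    public class SegmentMathSketch {
        public static void main(String[] args) {
            final long flowFileSize = 25; // hypothetical FlowFile content size in bytes
            final long segmentSize = 10;  // hypothetical "Segment Size" value, already converted to bytes

            // assumed: one segment per full or partial segmentSize chunk
            final int totalSegments = (int) ((flowFileSize + segmentSize - 1) / segmentSize);

            for (int i = 1; i <= totalSegments; i++) {
                final long segmentOffset = segmentSize * (i - 1);
                final long segmentLength = Math.min(segmentSize, flowFileSize - segmentOffset);
                System.out.println("segment " + i + ": offset=" + segmentOffset + ", length=" + segmentLength);
            }
        }
    }

With these numbers the sketch prints three segments of 10, 10 and 5 bytes, matching the per-segment clone(flowFile, segmentOffset, length) calls made in the loop above.
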
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
index 1c9a8c5..cfa0bda 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
@@ -85,43 +85,43 @@ public class SplitContent extends AbstractProcessor {
     static final AllowableValue TRAILING_POSITION = new AllowableValue("Trailing", "Trailing", "Keep the Byte Sequence at the end of the first split if <Keep Byte Sequence> is true");
     static final AllowableValue LEADING_POSITION = new AllowableValue("Leading", "Leading", "Keep the Byte Sequence at the beginning of the second split if <Keep Byte Sequence> is true");
 
-    public static final PropertyDescriptor FORMAT = new PropertyDescriptor.Builder().
-            name("Byte Sequence Format").
-            description("Specifies how the <Byte Sequence> property should be interpreted").
-            required(true).
-            allowableValues(HEX_FORMAT, UTF8_FORMAT).
-            defaultValue(HEX_FORMAT.getValue()).
-            build();
-    public static final PropertyDescriptor BYTE_SEQUENCE = new PropertyDescriptor.Builder().
-            name("Byte Sequence").
-            description("A representation of bytes to look for and upon which to split the source file into separate files").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            required(true).
-            build();
-    public static final PropertyDescriptor KEEP_SEQUENCE = new PropertyDescriptor.Builder().
-            name("Keep Byte Sequence").
-            description("Determines whether or not the Byte Sequence should be included with each Split").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("false").
-            build();
-    public static final PropertyDescriptor BYTE_SEQUENCE_LOCATION = new PropertyDescriptor.Builder().
-            name("Byte Sequence Location").
-            description("If <Keep Byte Sequence> is set to true, specifies whether the byte sequence should be added to the end of the first "
-                    + "split or the beginning of the second; if <Keep Byte Sequence> is false, this property is ignored.").
-            required(true).
-            allowableValues(TRAILING_POSITION, LEADING_POSITION).
-            defaultValue(TRAILING_POSITION.getValue()).
-            build();
+    public static final PropertyDescriptor FORMAT = new PropertyDescriptor.Builder()
+            .name("Byte Sequence Format")
+            .description("Specifies how the <Byte Sequence> property should be interpreted")
+            .required(true)
+            .allowableValues(HEX_FORMAT, UTF8_FORMAT)
+            .defaultValue(HEX_FORMAT.getValue())
+            .build();
+    public static final PropertyDescriptor BYTE_SEQUENCE = new PropertyDescriptor.Builder()
+            .name("Byte Sequence")
+            .description("A representation of bytes to look for and upon which to split the source file into separate files")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .required(true)
+            .build();
+    public static final PropertyDescriptor KEEP_SEQUENCE = new PropertyDescriptor.Builder()
+            .name("Keep Byte Sequence")
+            .description("Determines whether or not the Byte Sequence should be included with each Split")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("false")
+            .build();
+    public static final PropertyDescriptor BYTE_SEQUENCE_LOCATION = new PropertyDescriptor.Builder()
+            .name("Byte Sequence Location")
+            .description("If <Keep Byte Sequence> is set to true, specifies whether the byte sequence should be added to the end of the first "
+                    + "split or the beginning of the second; if <Keep Byte Sequence> is false, this property is ignored.")
+            .required(true)
+            .allowableValues(TRAILING_POSITION, LEADING_POSITION)
+            .defaultValue(TRAILING_POSITION.getValue())
+            .build();
 
     public static final Relationship REL_SPLITS = new Relationship.Builder()
-            .name("splits").
-            description("All Splits will be routed to the splits relationship").
-            build();
+            .name("splits")
+            .description("All Splits will be routed to the splits relationship")
+            .build();
     public static final Relationship REL_ORIGINAL = new Relationship.Builder()
-            .name("original").
-            description("The original file").
-            build();
+            .name("original")
+            .description("The original file")
+            .build();
 
     private Set<Relationship> relationships;
     private List<PropertyDescriptor> properties;
@@ -156,15 +156,10 @@ public class SplitContent extends AbstractProcessor {
     @Override
     protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
         final List<ValidationResult> results = new ArrayList<>(1);
-        final String format = validationContext.getProperty(FORMAT).
-                getValue();
-        if (HEX_FORMAT.getValue().
-                equals(format)) {
-            final String byteSequence = validationContext.
-                    getProperty(BYTE_SEQUENCE).
-                    getValue();
-            final ValidationResult result = new HexStringPropertyValidator().
-                    validate(BYTE_SEQUENCE.getName(), byteSequence, validationContext);
+        final String format = validationContext.getProperty(FORMAT).getValue();
+        if (HEX_FORMAT.getValue().equals(format)) {
+            final String byteSequence = validationContext.getProperty(BYTE_SEQUENCE).getValue();
+            final ValidationResult result = new HexStringPropertyValidator().validate(BYTE_SEQUENCE.getName(), byteSequence, validationContext);
             results.add(result);
         }
         return results;
@@ -172,13 +167,10 @@ public class SplitContent extends AbstractProcessor {
 
     @OnScheduled
     public void initializeByteSequence(final ProcessContext context) throws DecoderException {
-        final String bytePattern = context.getProperty(BYTE_SEQUENCE).
-                getValue();
+        final String bytePattern = context.getProperty(BYTE_SEQUENCE).getValue();
 
-        final String format = context.getProperty(FORMAT).
-                getValue();
-        if (HEX_FORMAT.getValue().
-                equals(format)) {
+        final String format = context.getProperty(FORMAT).getValue();
+        if (HEX_FORMAT.getValue().equals(format)) {
             this.byteSequence.set(Hex.decodeHex(bytePattern.toCharArray()));
         } else {
             this.byteSequence.set(bytePattern.getBytes(StandardCharsets.UTF_8));
@@ -193,14 +185,11 @@ public class SplitContent extends AbstractProcessor {
         }
 
         final ProcessorLog logger = getLogger();
-        final boolean keepSequence = context.getProperty(KEEP_SEQUENCE).
-                asBoolean();
+        final boolean keepSequence = context.getProperty(KEEP_SEQUENCE).asBoolean();
         final boolean keepTrailingSequence;
         final boolean keepLeadingSequence;
         if (keepSequence) {
-            if (context.getProperty(BYTE_SEQUENCE_LOCATION).
-                    getValue().
-                    equals(TRAILING_POSITION.getValue())) {
+            if (context.getProperty(BYTE_SEQUENCE_LOCATION).getValue().equals(TRAILING_POSITION.getValue())) {
                 keepTrailingSequence = true;
                 keepLeadingSequence = false;
             } else {
@@ -214,8 +203,7 @@ public class SplitContent extends AbstractProcessor {
 
         final byte[] byteSequence = this.byteSequence.get();
         if (byteSequence == null) {   // should never happen. But just in case...
-            logger.
-                    error("{} Unable to obtain Byte Sequence", new Object[]{this});
+            logger.error("{} Unable to obtain Byte Sequence", new Object[]{this});
             session.rollback();
             return;
         }
@@ -292,8 +280,7 @@ public class SplitContent extends AbstractProcessor {
             finalSplitOffset += byteSequence.length;
         }
         if (finalSplitOffset > -1L && finalSplitOffset < flowFile.getSize()) {
-            FlowFile finalSplit = session.
-                    clone(flowFile, finalSplitOffset, flowFile.getSize() - finalSplitOffset);
+            FlowFile finalSplit = session.clone(flowFile, finalSplitOffset, flowFile.getSize() - finalSplitOffset);
             splitList.add(finalSplit);
         }
 
@@ -302,13 +289,9 @@ public class SplitContent extends AbstractProcessor {
         session.transfer(flowFile, REL_ORIGINAL);
 
         if (splitList.size() > 10) {
-            logger.
-                    info("Split {} into {} files", new Object[]{flowFile, splitList.
-                        size()});
+            logger.info("Split {} into {} files", new Object[]{flowFile, splitList.size()});
         } else {
-            logger.
-                    info("Split {} into {} files: {}", new Object[]{flowFile, splitList.
-                        size(), splitList});
+            logger.info("Split {} into {} files: {}", new Object[]{flowFile, splitList.size(), splitList});
         }
     }
 
@@ -323,8 +306,7 @@ public class SplitContent extends AbstractProcessor {
         final String originalFilename = source.
                 getAttribute(CoreAttributes.FILENAME.key());
 
-        final String fragmentId = UUID.randomUUID().
-                toString();
+        final String fragmentId = UUID.randomUUID().toString();
         final ArrayList<FlowFile> newList = new ArrayList<>(splits);
         splits.clear();
         for (int i = 1; i <= newList.size(); i++) {
@@ -345,16 +327,9 @@ public class SplitContent extends AbstractProcessor {
         public ValidationResult validate(final String subject, final String input, final ValidationContext validationContext) {
             try {
                 Hex.decodeHex(input.toCharArray());
-                return new ValidationResult.Builder().valid(true).
-                        input(input).
-                        subject(subject).
-                        build();
+                return new ValidationResult.Builder().valid(true).input(input).subject(subject).build();
             } catch (final Exception e) {
-                return new ValidationResult.Builder().valid(false).
-                        explanation("Not a valid Hex String").
-                        input(input).
-                        subject(subject).
-                        build();
+                return new ValidationResult.Builder().valid(false).explanation("Not a valid Hex String").input(input).subject(subject).build();
             }
         }
     }

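The hex Byte Sequence handling above can be exercised in the same TestRunner style used by the unit tests elsewhere in this commit. This is a hypothetical sketch rather than a test from the repository; the property and relationship names come from the SplitContent diff above, and the expected split count assumes one split per delimited section under the defaults (hex format, Keep Byte Sequence = false):

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.processors.standard.SplitContent;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class SplitContentUsageSketch {

        @Test
        public void testSplitOnCrlf() {
            final TestRunner runner = TestRunners.newTestRunner(new SplitContent());
            // Byte Sequence Format defaults to hex, so "0d0a" means "split on CRLF"
            runner.setProperty(SplitContent.BYTE_SEQUENCE, "0d0a");

            runner.enqueue("line 1\r\nline 2\r\nline 3".getBytes(StandardCharsets.UTF_8));
            runner.run();

            // the input FlowFile goes to 'original'; each delimited section becomes a split
            runner.assertTransferCount(SplitContent.REL_ORIGINAL, 1);
            runner.assertTransferCount(SplitContent.REL_SPLITS, 3);
        }
    }
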
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
index 2ffebd5..ef7a86a 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
@@ -59,28 +59,27 @@ import java.util.concurrent.atomic.AtomicReference;
         + "does not evaluate to an array element, the original file is routed to 'failure' and no files are generated.")
 public class SplitJson extends AbstractJsonPathProcessor {
 
-    public static final PropertyDescriptor ARRAY_JSON_PATH_EXPRESSION = new PropertyDescriptor.Builder().
-            name("JsonPath Expression").
-            description("A JsonPath expression that indicates the array element to split into JSON/scalar fragments.").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // Full validation/caching occurs in #customValidate
-            .
-            required(true).
-            build();
-
-    public static final Relationship REL_ORIGINAL = new Relationship.Builder().
-            name("original").
-            description("The original FlowFile that was split into segments. If the FlowFile fails processing, nothing will be sent to "
-                    + "this relationship").
-            build();
-    public static final Relationship REL_SPLIT = new Relationship.Builder().
-            name("split").
-            description("All segments of the original FlowFile will be routed to this relationship").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("If a FlowFile fails processing for any reason (for example, the FlowFile is not valid JSON or the specified "
-                    + "path does not exist), it will be routed to this relationship").
-            build();
+    public static final PropertyDescriptor ARRAY_JSON_PATH_EXPRESSION = new PropertyDescriptor.Builder()
+            .name("JsonPath Expression")
+            .description("A JsonPath expression that indicates the array element to split into JSON/scalar fragments.")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // Full validation/caching occurs in #customValidate
+            .required(true)
+            .build();
+
+    public static final Relationship REL_ORIGINAL = new Relationship.Builder()
+            .name("original")
+            .description("The original FlowFile that was split into segments. If the FlowFile fails processing, nothing will be sent to "
+                    + "this relationship")
+            .build();
+    public static final Relationship REL_SPLIT = new Relationship.Builder()
+            .name("split")
+            .description("All segments of the original FlowFile will be routed to this relationship")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("If a FlowFile fails processing for any reason (for example, the FlowFile is not valid JSON or the specified "
+                    + "path does not exist), it will be routed to this relationship")
+            .build();
 
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
@@ -137,10 +136,8 @@ public class SplitJson extends AbstractJsonPathProcessor {
             }
         };
 
-        String value = validationContext.getProperty(ARRAY_JSON_PATH_EXPRESSION).
-                getValue();
-        return Collections.singleton(validator.
-                validate(ARRAY_JSON_PATH_EXPRESSION.getName(), value, validationContext));
+        String value = validationContext.getProperty(ARRAY_JSON_PATH_EXPRESSION).getValue();
+        return Collections.singleton(validator.validate(ARRAY_JSON_PATH_EXPRESSION.getName(), value, validationContext));
     }
 
     @Override
@@ -156,18 +153,14 @@ public class SplitJson extends AbstractJsonPathProcessor {
         try {
             documentContext = validateAndEstablishJsonContext(processSession, original);
         } catch (InvalidJsonException e) {
-            logger.
-                    error("FlowFile {} did not have valid JSON content.", new Object[]{original});
+            logger.error("FlowFile {} did not have valid JSON content.", new Object[]{original});
             processSession.transfer(original, REL_FAILURE);
             return;
         }
 
         final JsonPath jsonPath = JSON_PATH_REF.get();
-        String representationOption = processContext.
-                getProperty(NULL_VALUE_DEFAULT_REPRESENTATION).
-                getValue();
-        final String nullDefaultValue = NULL_REPRESENTATION_MAP.
-                get(representationOption);
+        String representationOption = processContext.getProperty(NULL_VALUE_DEFAULT_REPRESENTATION).getValue();
+        final String nullDefaultValue = NULL_REPRESENTATION_MAP.get(representationOption);
 
         final List<FlowFile> segments = new ArrayList<>();
 
@@ -175,17 +168,13 @@ public class SplitJson extends AbstractJsonPathProcessor {
         try {
             jsonPathResult = documentContext.read(jsonPath);
         } catch (PathNotFoundException e) {
-            logger.
-                    warn("JsonPath {} could not be found for FlowFile {}", new Object[]{jsonPath.
-                        getPath(), original});
+            logger.warn("JsonPath {} could not be found for FlowFile {}", new Object[]{jsonPath.getPath(), original});
             processSession.transfer(original, REL_FAILURE);
             return;
         }
 
         if (!(jsonPathResult instanceof List)) {
-            logger.
-                    error("The evaluated value {} of {} was not a JSON Array compatible type and cannot be split.",
-                            new Object[]{jsonPathResult, jsonPath.getPath()});
+            logger.error("The evaluated value {} of {} was not a JSON Array compatible type and cannot be split.", new Object[]{jsonPathResult, jsonPath.getPath()});
             processSession.transfer(original, REL_FAILURE);
             return;
         }
@@ -198,20 +187,16 @@ public class SplitJson extends AbstractJsonPathProcessor {
                 @Override
                 public void process(OutputStream out) throws IOException {
                     String resultSegmentContent = getResultRepresentation(resultSegment, nullDefaultValue);
-                    out.write(resultSegmentContent.
-                            getBytes(StandardCharsets.UTF_8));
+                    out.write(resultSegmentContent.getBytes(StandardCharsets.UTF_8));
                 }
             });
             segments.add(split);
         }
 
-        processSession.getProvenanceReporter().
-                fork(original, segments);
+        processSession.getProvenanceReporter().fork(original, segments);
 
         processSession.transfer(segments, REL_SPLIT);
         processSession.transfer(original, REL_ORIGINAL);
-        logger.
-                info("Split {} into {} FlowFiles", new Object[]{original, segments.
-                    size()});
+        logger.info("Split {} into {} FlowFiles", new Object[]{original, segments.size()});
     }
 }

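The "instanceof List" check above is what decides between 'split' and 'failure'. A minimal Jayway JsonPath sketch (with hypothetical input documents, not taken from the repository) shows which expressions survive that check:

    import java.util.List;

    import com.jayway.jsonpath.JsonPath;

    public class JsonPathResultSketch {
        public static void main(String[] args) {
            // an expression that selects the array itself: the result is a List, so each element becomes a split
            Object arrayResult = JsonPath.read("[{\"a\":1},{\"b\":2}]", "$");
            System.out.println(arrayResult instanceof List); // true -> routed to 'split'

            // an expression that selects a scalar: not a List, so SplitJson routes the FlowFile to 'failure'
            Object scalarResult = JsonPath.read("{\"a\":1}", "$.a");
            System.out.println(scalarResult instanceof List); // false -> routed to 'failure'
        }
    }
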
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
index f68ef4e..d641274 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
@@ -77,41 +77,40 @@ public class SplitText extends AbstractProcessor {
     public static final String FRAGMENT_COUNT = "fragment.count";
     public static final String SEGMENT_ORIGINAL_FILENAME = "segment.original.filename";
 
-    public static final PropertyDescriptor LINE_SPLIT_COUNT = new PropertyDescriptor.Builder().
-            name("Line Split Count").
-            description("The number of lines that will be added to each split file").
-            required(true).
-            addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR).
-            build();
-    public static final PropertyDescriptor HEADER_LINE_COUNT = new PropertyDescriptor.Builder().
-            name("Header Line Count").
-            description("The number of lines that should be considered part of the header; the header lines will be duplicated to all split files").
-            required(true).
-            addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR).
-            defaultValue("0").
-            build();
-    public static final PropertyDescriptor REMOVE_TRAILING_NEWLINES = new PropertyDescriptor.Builder().
-            name("Remove Trailing Newlines").
-            description(
-                    "Whether to remove newlines at the end of each split file. This should be false if you intend to merge the split files later").
-            required(true).
-            addValidator(StandardValidators.BOOLEAN_VALIDATOR).
-            allowableValues("true", "false").
-            defaultValue("true").
-            build();
-
-    public static final Relationship REL_ORIGINAL = new Relationship.Builder().
-            name("original").
-            description("The original input file will be routed to this destination when it has been successfully split into 1 or more files").
-            build();
-    public static final Relationship REL_SPLITS = new Relationship.Builder().
-            name("splits").
-            description("The split files will be routed to this destination when an input file is successfully split into 1 or more split files").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("If a file cannot be split for some reason, the original file will be routed to this destination and nothing will be routed elsewhere").
-            build();
+    public static final PropertyDescriptor LINE_SPLIT_COUNT = new PropertyDescriptor.Builder()
+            .name("Line Split Count")
+            .description("The number of lines that will be added to each split file")
+            .required(true)
+            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor HEADER_LINE_COUNT = new PropertyDescriptor.Builder()
+            .name("Header Line Count")
+            .description("The number of lines that should be considered part of the header; the header lines will be duplicated to all split files")
+            .required(true)
+            .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
+            .defaultValue("0")
+            .build();
+    public static final PropertyDescriptor REMOVE_TRAILING_NEWLINES = new PropertyDescriptor.Builder()
+            .name("Remove Trailing Newlines")
+            .description("Whether to remove newlines at the end of each split file. This should be false if you intend to merge the split files later")
+            .required(true)
+            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .build();
+
+    public static final Relationship REL_ORIGINAL = new Relationship.Builder()
+            .name("original")
+            .description("The original input file will be routed to this destination when it has been successfully split into 1 or more files")
+            .build();
+    public static final Relationship REL_SPLITS = new Relationship.Builder()
+            .name("splits")
+            .description("The split files will be routed to this destination when an input file is successfully split into 1 or more split files")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("If a file cannot be split for some reason, the original file will be routed to this destination and nothing will be routed elsewhere")
+            .build();
 
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
@@ -235,13 +234,9 @@ public class SplitText extends AbstractProcessor {
         }
 
         final ProcessorLog logger = getLogger();
-        final int headerCount = context.getProperty(HEADER_LINE_COUNT).
-                asInteger();
-        final int splitCount = context.getProperty(LINE_SPLIT_COUNT).
-                asInteger();
-        final boolean removeTrailingNewlines = context.
-                getProperty(REMOVE_TRAILING_NEWLINES).
-                asBoolean();
+        final int headerCount = context.getProperty(HEADER_LINE_COUNT).asInteger();
+        final int splitCount = context.getProperty(LINE_SPLIT_COUNT).asInteger();
+        final boolean removeTrailingNewlines = context.getProperty(REMOVE_TRAILING_NEWLINES).asBoolean();
 
         final ObjectHolder<String> errorMessage = new ObjectHolder<>(null);
         final ArrayList<SplitInfo> splitInfos = new ArrayList<>();
@@ -258,8 +253,7 @@ public class SplitText extends AbstractProcessor {
                     final ByteArrayOutputStream headerStream = new ByteArrayOutputStream();
                     final int headerLinesCopied = readLines(in, headerCount, headerStream, true);
                     if (headerLinesCopied < headerCount) {
-                        errorMessage.
-                                set("Header Line Count is set to " + headerCount + " but file had only " + headerLinesCopied + " lines");
+                        errorMessage.set("Header Line Count is set to " + headerCount + " but file had only " + headerLinesCopied + " lines");
                         return;
                     }
 
@@ -270,23 +264,17 @@ public class SplitText extends AbstractProcessor {
                             final IntegerHolder linesCopied = new IntegerHolder(0);
                             FlowFile splitFile = session.create(flowFile);
                             try {
-                                splitFile = session.
-                                        write(splitFile, new OutputStreamCallback() {
-                                            @Override
-                                            public void process(final OutputStream rawOut) throws IOException {
-                                                try (final BufferedOutputStream out = new BufferedOutputStream(rawOut)) {
-                                                    headerStream.writeTo(out);
-                                                    linesCopied.
-                                                    set(readLines(in, splitCount, out, !removeTrailingNewlines));
-                                                }
-                                            }
-                                        });
-                                splitFile = session.
-                                        putAttribute(splitFile, SPLIT_LINE_COUNT, String.
-                                                valueOf(linesCopied.get()));
-                                logger.
-                                        debug("Created Split File {} with {} lines", new Object[]{splitFile, linesCopied.
-                                            get()});
+                                splitFile = session.write(splitFile, new OutputStreamCallback() {
+                                    @Override
+                                    public void process(final OutputStream rawOut) throws IOException {
+                                        try (final BufferedOutputStream out = new BufferedOutputStream(rawOut)) {
+                                            headerStream.writeTo(out);
+                                            linesCopied.set(readLines(in, splitCount, out, !removeTrailingNewlines));
+                                        }
+                                    }
+                                });
+                                splitFile = session.putAttribute(splitFile, SPLIT_LINE_COUNT, String.valueOf(linesCopied.get()));
+                                logger.debug("Created Split File {} with {} lines", new Object[]{splitFile, linesCopied.get()});
                             } finally {
                                 if (linesCopied.get() > 0) {
                                     splits.add(splitFile);
@@ -313,11 +301,10 @@ public class SplitText extends AbstractProcessor {
                                 info.offsetBytes = beforeReadingLines;
                                 splitInfos.add(info);
                                 final long procNanos = System.nanoTime() - startNanos;
-                                final long procMillis = TimeUnit.MILLISECONDS.
-                                        convert(procNanos, TimeUnit.NANOSECONDS);
-                                logger.
-                                        debug("Detected start of Split File in {} at byte offset {} with a length of {} bytes; total splits = {}; total processing time = {} ms", new Object[]{flowFile, beforeReadingLines, info.lengthBytes, splitInfos.
-                                            size(), procMillis});
+                                final long procMillis = TimeUnit.MILLISECONDS.convert(procNanos, TimeUnit.NANOSECONDS);
+                                logger.debug("Detected start of Split File in {} at byte offset {} with a length of {} bytes; "
+                                        + "total splits = {}; total processing time = {} ms",
+                                        new Object[]{flowFile, beforeReadingLines, info.lengthBytes, splitInfos.size(), procMillis});
                             }
                         }
                     }
@@ -326,9 +313,7 @@ public class SplitText extends AbstractProcessor {
         });
 
         if (errorMessage.get() != null) {
-            logger.
-                    error("Unable to split {} due to {}; routing to failure", new Object[]{flowFile, errorMessage.
-                        get()});
+            logger.error("Unable to split {} due to {}; routing to failure", new Object[]{flowFile, errorMessage.get()});
             session.transfer(flowFile, REL_FAILURE);
             if (splits != null && !splits.isEmpty()) {
                 session.remove(splits);
@@ -339,22 +324,17 @@ public class SplitText extends AbstractProcessor {
         if (!splitInfos.isEmpty()) {
             // Create the splits
             for (final SplitInfo info : splitInfos) {
-                FlowFile split = session.
-                        clone(flowFile, info.offsetBytes, info.lengthBytes);
-                split = session.putAttribute(split, SPLIT_LINE_COUNT, String.
-                        valueOf(info.lengthLines));
+                FlowFile split = session.clone(flowFile, info.offsetBytes, info.lengthBytes);
+                split = session.putAttribute(split, SPLIT_LINE_COUNT, String.valueOf(info.lengthLines));
                 splits.add(split);
             }
         }
         finishFragmentAttributes(session, flowFile, splits);
 
         if (splits.size() > 10) {
-            logger.info("Split {} into {} files", new Object[]{flowFile, splits.
-                size()});
+            logger.info("Split {} into {} files", new Object[]{flowFile, splits.size()});
         } else {
-            logger.
-                    info("Split {} into {} files: {}", new Object[]{flowFile, splits.
-                        size(), splits});
+            logger.info("Split {} into {} files: {}", new Object[]{flowFile, splits.size(), splits});
         }
 
         session.transfer(flowFile, REL_ORIGINAL);
@@ -369,11 +349,9 @@ public class SplitText extends AbstractProcessor {
      * @param unpacked
      */
     private void finishFragmentAttributes(final ProcessSession session, final FlowFile source, final List<FlowFile> splits) {
-        final String originalFilename = source.
-                getAttribute(CoreAttributes.FILENAME.key());
+        final String originalFilename = source.getAttribute(CoreAttributes.FILENAME.key());
 
-        final String fragmentId = UUID.randomUUID().
-                toString();
+        final String fragmentId = UUID.randomUUID().toString();
         final ArrayList<FlowFile> newList = new ArrayList<>(splits);
         splits.clear();
         for (int i = 1; i <= newList.size(); i++) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
index 8e80e91..adbfff2 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
@@ -66,26 +66,26 @@ import org.xml.sax.XMLReader;
 @CapabilityDescription("Splits an XML File into multiple separate FlowFiles, each comprising a child or descendant of the original root element")
 public class SplitXml extends AbstractProcessor {
 
-    public static final PropertyDescriptor SPLIT_DEPTH = new PropertyDescriptor.Builder().
-            name("Split Depth").
-            description("Indicates the XML-nesting depth to start splitting XML fragments. A depth of 1 means split the root's children, whereas a depth of 2 means split the root's children's children and so forth.").
-            required(true).
-            addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR).
-            defaultValue("1").
-            build();
-
-    public static final Relationship REL_ORIGINAL = new Relationship.Builder().
-            name("original").
-            description("The original FlowFile that was split into segments. If the FlowFile fails processing, nothing will be sent to this relationship").
-            build();
-    public static final Relationship REL_SPLIT = new Relationship.Builder().
-            name("split").
-            description("All segments of the original FlowFile will be routed to this relationship").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("If a FlowFile fails processing for any reason (for example, the FlowFile is not valid XML), it will be routed to this relationship").
-            build();
+    public static final PropertyDescriptor SPLIT_DEPTH = new PropertyDescriptor.Builder()
+            .name("Split Depth")
+            .description("Indicates the XML-nesting depth to start splitting XML fragments. A depth of 1 means split the root's children, whereas a depth of 2 means split the root's children's children and so forth.")
+            .required(true)
+            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
+            .defaultValue("1")
+            .build();
+
+    public static final Relationship REL_ORIGINAL = new Relationship.Builder()
+            .name("original")
+            .description("The original FlowFile that was split into segments. If the FlowFile fails processing, nothing will be sent to this relationship")
+            .build();
+    public static final Relationship REL_SPLIT = new Relationship.Builder()
+            .name("split")
+            .description("All segments of the original FlowFile will be routed to this relationship")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("If a FlowFile fails processing for any reason (for example, the FlowFile is not valid XML), it will be routed to this relationship")
+            .build();
 
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
@@ -93,8 +93,7 @@ public class SplitXml extends AbstractProcessor {
     private static final String FEATURE_PREFIX = "http://xml.org/sax/features/";
     public static final String ENABLE_NAMESPACES_FEATURE = FEATURE_PREFIX + "namespaces";
     public static final String ENABLE_NAMESPACE_PREFIXES_FEATURE = FEATURE_PREFIX + "namespace-prefixes";
-    private static final SAXParserFactory saxParserFactory = SAXParserFactory.
-            newInstance();
+    private static final SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
 
     static {
         saxParserFactory.setNamespaceAware(true);
@@ -103,8 +102,7 @@ public class SplitXml extends AbstractProcessor {
             saxParserFactory.setFeature(ENABLE_NAMESPACE_PREFIXES_FEATURE, true);
         } catch (Exception e) {
             final Logger staticLogger = LoggerFactory.getLogger(SplitXml.class);
-            staticLogger.
-                    warn("Unable to configure SAX Parser to make namespaces available", e);
+            staticLogger.warn("Unable to configure SAX Parser to make namespaces available", e);
         }
     }
 
@@ -138,8 +136,7 @@ public class SplitXml extends AbstractProcessor {
             return;
         }
 
-        final int depth = context.getProperty(SPLIT_DEPTH).
-                asInteger();
+        final int depth = context.getProperty(SPLIT_DEPTH).asInteger();
         final ProcessorLog logger = getLogger();
 
         final List<FlowFile> splits = new ArrayList<>();
@@ -169,8 +166,7 @@ public class SplitXml extends AbstractProcessor {
                         reader.setContentHandler(parser);
                         reader.parse(new InputSource(in));
                     } catch (final ParserConfigurationException | SAXException e) {
-                        logger.
-                                error("Unable to parse {} due to {}", new Object[]{original, e});
+                        logger.error("Unable to parse {} due to {}", new Object[]{original, e});
                         failed.set(true);
                     }
                 }
@@ -183,9 +179,7 @@ public class SplitXml extends AbstractProcessor {
         } else {
             session.transfer(splits, REL_SPLIT);
             session.transfer(original, REL_ORIGINAL);
-            logger.
-                    info("Split {} into {} FlowFiles", new Object[]{original, splits.
-                        size()});
+            logger.info("Split {} into {} FlowFiles", new Object[]{original, splits.size()});
         }
     }
 
@@ -247,9 +241,7 @@ public class SplitXml extends AbstractProcessor {
             // if we're at a level where we care about capturing text, then add the closing element
             if (newDepth >= splitDepth) {
                 // Add the element end tag.
-                sb.append("</").
-                        append(qName).
-                        append(">");
+                sb.append("</").append(qName).append(">");
             }
 
             // If we have now returned to level 1, we have finished processing
@@ -301,14 +293,8 @@ public class SplitXml extends AbstractProcessor {
                 int attCount = atts.getLength();
                 for (int i = 0; i < attCount; i++) {
                     String attName = atts.getQName(i);
-                    String attValue = StringEscapeUtils.escapeXml10(atts.
-                            getValue(i));
-                    sb.append(" ").
-                            append(attName).
-                            append("=").
-                            append("\"").
-                            append(attValue).
-                            append("\"");
+                    String attValue = StringEscapeUtils.escapeXml10(atts.getValue(i));
+                    sb.append(" ").append(attName).append("=").append("\"").append(attValue).append("\"");
                 }
 
                 sb.append(">");

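The Split Depth semantics described above ("a depth of 1 means split the root's children") can be illustrated with a small hypothetical TestRunner sketch; the XML input and expected counts are assumptions for illustration, not a test from the repository:

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.processors.standard.SplitXml;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class SplitXmlUsageSketch {

        @Test
        public void testSplitRootChildren() {
            final TestRunner runner = TestRunners.newTestRunner(new SplitXml());
            // Split Depth defaults to 1, so each direct child of <users> should become its own FlowFile

            runner.enqueue("<users><user><name>a</name></user><user><name>b</name></user></users>"
                    .getBytes(StandardCharsets.UTF_8));
            runner.run();

            runner.assertTransferCount(SplitXml.REL_ORIGINAL, 1);
            runner.assertTransferCount(SplitXml.REL_SPLIT, 2);
        }
    }
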
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
index 3451516..2abf4a1 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
@@ -69,21 +69,21 @@ import org.apache.nifi.util.Tuple;
         description = "These XSLT parameters are passed to the transformer")
 public class TransformXml extends AbstractProcessor {
 
-    public static final PropertyDescriptor XSLT_FILE_NAME = new PropertyDescriptor.Builder().
-            name("XSLT file name").
-            description("Provides the name (including full path) of the XSLT file to apply to the flowfile XML content.").
-            required(true).
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            build();
-
-    public static final Relationship REL_SUCCESS = new Relationship.Builder().
-            name("success").
-            description("The FlowFile with transformed content will be routed to this relationship").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("If a FlowFile fails processing for any reason (for example, the FlowFile is not valid XML), it will be routed to this relationship").
-            build();
+    public static final PropertyDescriptor XSLT_FILE_NAME = new PropertyDescriptor.Builder()
+            .name("XSLT file name")
+            .description("Provides the name (including full path) of the XSLT file to apply to the flowfile XML content.")
+            .required(true)
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .build();
+
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("The FlowFile with transformed content will be routed to this relationship")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("If a FlowFile fails processing for any reason (for example, the FlowFile is not valid XML), it will be routed to this relationship")
+            .build();
 
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
@@ -113,13 +113,12 @@ public class TransformXml extends AbstractProcessor {
     @Override
     protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
         return new PropertyDescriptor.Builder()
-                .name(propertyDescriptorName).
-                expressionLanguageSupported(true).
-                addValidator(StandardValidators.
-                        createAttributeExpressionLanguageValidator(AttributeExpression.ResultType.STRING, true)).
-                required(false).
-                dynamic(true).
-                build();
+                .name(propertyDescriptorName)
+                .expressionLanguageSupported(true)
+                .addValidator(StandardValidators.createAttributeExpressionLanguageValidator(AttributeExpression.ResultType.STRING, true))
+                .required(false)
+                .dynamic(true)
+                .build();
     }
 
     @Override
@@ -139,26 +138,17 @@ public class TransformXml extends AbstractProcessor {
                         public void process(final InputStream rawIn, final OutputStream out) throws IOException {
                             try (final InputStream in = new BufferedInputStream(rawIn)) {
 
-                                File stylesheet = new File(context.
-                                        getProperty(XSLT_FILE_NAME).
-                                        getValue());
+                                File stylesheet = new File(context.getProperty(XSLT_FILE_NAME).getValue());
                                 StreamSource styleSource = new StreamSource(stylesheet);
                                 TransformerFactory tfactory = new net.sf.saxon.TransformerFactoryImpl();
-                                Transformer transformer = tfactory.
-                                newTransformer(styleSource);
+                                Transformer transformer = tfactory.newTransformer(styleSource);
 
                                 // pass all dynamic properties to the transformer
-                                for (final Map.Entry<PropertyDescriptor, String> entry : context.
-                                getProperties().
+                                for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().
                                 entrySet()) {
-                                    if (entry.getKey().
-                                    isDynamic()) {
-                                        String value = context.
-                                        newPropertyValue(entry.getValue()).
-                                        evaluateAttributeExpressions(original).
-                                        getValue();
-                                        transformer.setParameter(entry.getKey().
-                                                getName(), value);
+                                    if (entry.getKey().isDynamic()) {
+                                        String value = context.newPropertyValue(entry.getValue()).evaluateAttributeExpressions(original).getValue();
+                                        transformer.setParameter(entry.getKey().getName(), value);
                                     }
                                 }
 
@@ -172,13 +162,10 @@ public class TransformXml extends AbstractProcessor {
                         }
                     });
             session.transfer(transformed, REL_SUCCESS);
-            session.getProvenanceReporter().
-                    modifyContent(transformed, stopWatch.
-                            getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().modifyContent(transformed, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
             logger.info("Transformed {}", new Object[]{original});
         } catch (ProcessException e) {
-            logger.
-                    error("Unable to transform {} due to {}", new Object[]{original, e});
+            logger.error("Unable to transform {} due to {}", new Object[]{original, e});
             session.transfer(original, REL_FAILURE);
         }
     }
@@ -191,8 +178,7 @@ public class TransformXml extends AbstractProcessor {
         @Override
         public ValidationResult validate(final String subject, final String input, final ValidationContext validationContext) {
             final Tuple<String, ValidationResult> lastResult = this.cachedResult;
-            if (lastResult != null && lastResult.getKey().
-                    equals(input)) {
+            if (lastResult != null && lastResult.getKey().equals(input)) {
                 return lastResult.getValue();
             } else {
                 String error = null;
@@ -206,13 +192,12 @@ public class TransformXml extends AbstractProcessor {
                     error = e.toString();
                 }
 
-                this.cachedResult = new Tuple<>(input,
-                        new ValidationResult.Builder()
-                        .input(input).
-                        subject(subject).
-                        valid(error == null).
-                        explanation(error).
-                        build());
+                this.cachedResult = new Tuple<>(input, new ValidationResult.Builder()
+                        .input(input)
+                        .subject(subject)
+                        .valid(error == null)
+                        .explanation(error)
+                        .build());
                 return this.cachedResult.getValue();
             }
         }


[31/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserEntity.java
index 963e853..71554dd 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.UserDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a UserDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a UserDTO.
  */
 @XmlRootElement(name = "userEntity")
 public class UserEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserGroupEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserGroupEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserGroupEntity.java
index a6542c8..35a88d5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserGroupEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserGroupEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.UserGroupDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a UserGroupDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a UserGroupDTO.
  */
 @XmlRootElement(name = "userGroupEntity")
 public class UserGroupEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserSearchResultsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserSearchResultsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserSearchResultsEntity.java
index eece172..fcdeee3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserSearchResultsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UserSearchResultsEntity.java
@@ -22,9 +22,8 @@ import org.apache.nifi.web.api.dto.search.UserGroupSearchResultDTO;
 import org.apache.nifi.web.api.dto.search.UserSearchResultDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to UserSearchResultDTOs and UserGroupSearchResultDTOs.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to UserSearchResultDTOs and
+ * UserGroupSearchResultDTOs.
  */
 @XmlRootElement(name = "userSearchResultsEntity")
 public class UserSearchResultsEntity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UsersEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UsersEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UsersEntity.java
index 180b650..4acc7cb 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UsersEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/UsersEntity.java
@@ -24,9 +24,7 @@ import org.apache.nifi.web.api.dto.UserDTO;
 import org.apache.nifi.web.api.dto.util.TimeAdapter;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a collection of UserDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a collection of UserDTO.
  */
 @XmlRootElement(name = "usersEntity")
 public class UsersEntity extends Entity {


[04/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java
index 8b9b2ac..6012b04 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java
@@ -44,15 +44,13 @@ public class TestHandleHttpRequest {
 
     @Test
     public void testRequestAddedToService() throws InitializationException, MalformedURLException, IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(HandleHttpRequest.class);
+        final TestRunner runner = TestRunners.newTestRunner(HandleHttpRequest.class);
         runner.setProperty(HandleHttpRequest.PORT, "0");
 
         final MockHttpContextMap contextMap = new MockHttpContextMap();
         runner.addControllerService("http-context-map", contextMap);
         runner.enableControllerService(contextMap);
-        runner.
-                setProperty(HandleHttpRequest.HTTP_CONTEXT_MAP, "http-context-map");
+        runner.setProperty(HandleHttpRequest.HTTP_CONTEXT_MAP, "http-context-map");
 
         // trigger processor to stop but not shutdown.
         runner.run(1, false);
@@ -61,8 +59,7 @@ public class TestHandleHttpRequest {
                 @Override
                 public void run() {
                     try {
-                        final int port = ((HandleHttpRequest) runner.
-                                getProcessor()).getPort();
+                        final int port = ((HandleHttpRequest) runner.getProcessor()).getPort();
                         final HttpURLConnection connection = (HttpURLConnection) new URL("http://localhost:" + port + "/my/path?query=true&value1=value1&value2=&value3&value4=apple=orange").
                                 openConnection();
                         connection.setDoOutput(false);
@@ -73,8 +70,7 @@ public class TestHandleHttpRequest {
                         connection.setConnectTimeout(3000);
                         connection.setReadTimeout(3000);
 
-                        StreamUtils.
-                                copy(connection.getInputStream(), new NullOutputStream());
+                        StreamUtils.copy(connection.getInputStream(), new NullOutputStream());
                     } catch (final Throwable t) {
                         t.printStackTrace();
                         Assert.fail(t.toString());
@@ -92,13 +88,10 @@ public class TestHandleHttpRequest {
             // process the request.
             runner.run(1, false);
 
-            runner.
-                    assertAllFlowFilesTransferred(HandleHttpRequest.REL_SUCCESS, 1);
+            runner.assertAllFlowFilesTransferred(HandleHttpRequest.REL_SUCCESS, 1);
             assertEquals(1, contextMap.size());
 
-            final MockFlowFile mff = runner.
-                    getFlowFilesForRelationship(HandleHttpRequest.REL_SUCCESS).
-                    get(0);
+            final MockFlowFile mff = runner.getFlowFilesForRelationship(HandleHttpRequest.REL_SUCCESS).get(0);
             mff.assertAttributeEquals("http.query.param.query", "true");
             mff.assertAttributeEquals("http.query.param.value1", "value1");
             mff.assertAttributeEquals("http.query.param.value2", "");

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpResponse.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpResponse.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpResponse.java
index 40683ae..2bceda6 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpResponse.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpResponse.java
@@ -51,14 +51,12 @@ public class TestHandleHttpResponse {
 
     @Test
     public void testEnsureCompleted() throws InitializationException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(HandleHttpResponse.class);
+        final TestRunner runner = TestRunners.newTestRunner(HandleHttpResponse.class);
 
         final MockHttpContextMap contextMap = new MockHttpContextMap("my-id");
         runner.addControllerService("http-context-map", contextMap);
         runner.enableControllerService(contextMap);
-        runner.
-                setProperty(HandleHttpResponse.HTTP_CONTEXT_MAP, "http-context-map");
+        runner.setProperty(HandleHttpResponse.HTTP_CONTEXT_MAP, "http-context-map");
         runner.setProperty(HandleHttpResponse.STATUS_CODE, "${status.code}");
         runner.setProperty("my-attr", "${my-attr}");
         runner.setProperty("no-valid-attr", "${no-valid-attr}");
@@ -104,47 +102,42 @@ public class TestHandleHttpResponse {
         @Override
         public HttpServletResponse getResponse(final String identifier) {
             if (!id.equals(identifier)) {
-                Assert.
-                        fail("attempting to respond to wrong request; should have been " + id + " but was " + identifier);
+                Assert.fail("attempting to respond to wrong request; should have been " + id + " but was " + identifier);
             }
 
             try {
-                final HttpServletResponse response = Mockito.
-                        mock(HttpServletResponse.class);
-                Mockito.when(response.getOutputStream()).
-                        thenReturn(new ServletOutputStream() {
-                            @Override
-                            public boolean isReady() {
-                                return true;
-                            }
-
-                            @Override
-                            public void setWriteListener(WriteListener writeListener) {
-                            }
-
-                            @Override
-                            public void write(int b) throws IOException {
-                                baos.write(b);
-                            }
-
-                            @Override
-                            public void write(byte[] b) throws IOException {
-                                baos.write(b);
-                            }
-
-                            @Override
-                            public void write(byte[] b, int off, int len) throws IOException {
-                                baos.write(b, off, len);
-                            }
-                        });
+                final HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+                Mockito.when(response.getOutputStream()).thenReturn(new ServletOutputStream() {
+                    @Override
+                    public boolean isReady() {
+                        return true;
+                    }
+
+                    @Override
+                    public void setWriteListener(WriteListener writeListener) {
+                    }
+
+                    @Override
+                    public void write(int b) throws IOException {
+                        baos.write(b);
+                    }
+
+                    @Override
+                    public void write(byte[] b) throws IOException {
+                        baos.write(b);
+                    }
+
+                    @Override
+                    public void write(byte[] b, int off, int len) throws IOException {
+                        baos.write(b, off, len);
+                    }
+                });
 
                 Mockito.doAnswer(new Answer<Object>() {
                     @Override
                     public Object answer(final InvocationOnMock invocation) throws Throwable {
-                        final String key = invocation.
-                                getArgumentAt(0, String.class);
-                        final String value = invocation.
-                                getArgumentAt(1, String.class);
+                        final String key = invocation.getArgumentAt(0, String.class);
+                        final String value = invocation.getArgumentAt(1, String.class);
                         if (value == null) {
                             headersWithNoValue.add(key);
                         } else {
@@ -153,10 +146,7 @@ public class TestHandleHttpResponse {
 
                         return null;
                     }
-                }).
-                        when(response).
-                        setHeader(Mockito.any(String.class), Mockito.
-                                any(String.class));
+                }).when(response).setHeader(Mockito.any(String.class), Mockito.any(String.class));
 
                 Mockito.doAnswer(new Answer<Object>() {
                     @Override
@@ -164,9 +154,7 @@ public class TestHandleHttpResponse {
                         statusCode = invocation.getArgumentAt(0, int.class);
                         return null;
                     }
-                }).
-                        when(response).
-                        setStatus(Mockito.anyInt());
+                }).when(response).setStatus(Mockito.anyInt());
 
                 return response;
             } catch (final Exception e) {
@@ -179,8 +167,7 @@ public class TestHandleHttpResponse {
         @Override
         public void complete(final String identifier) {
             if (!id.equals(identifier)) {
-                Assert.
-                        fail("attempting to respond to wrong request; should have been " + id + " but was " + identifier);
+                Assert.fail("attempting to respond to wrong request; should have been " + id + " but was " + identifier);
             }
 
             completedCount.incrementAndGet();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashAttribute.java
index a57f6cf..7426e9e 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashAttribute.java
@@ -36,8 +36,7 @@ public class TestHashAttribute {
     @Test
     public void test() {
         final TestRunner runner = TestRunners.newTestRunner(new HashAttribute());
-        runner.
-                setProperty(HashAttribute.HASH_VALUE_ATTRIBUTE.getName(), "hashValue");
+        runner.setProperty(HashAttribute.HASH_VALUE_ATTRIBUTE.getName(), "hashValue");
         runner.setProperty("MDKey1", ".*");
         runner.setProperty("MDKey2", "(.).*");
 
@@ -67,9 +66,8 @@ public class TestHashAttribute {
         runner.assertTransferCount(HashAttribute.REL_FAILURE, 1);
         runner.assertTransferCount(HashAttribute.REL_SUCCESS, 4);
 
-        final List<MockFlowFile> success = runner.
-                getFlowFilesForRelationship(HashAttribute.REL_SUCCESS);
-        final Map<String, Integer> correlationCount = new HashMap<String, Integer>();
+        final List<MockFlowFile> success = runner.getFlowFilesForRelationship(HashAttribute.REL_SUCCESS);
+        final Map<String, Integer> correlationCount = new HashMap<>();
         for (final MockFlowFile flowFile : success) {
             final String correlationId = flowFile.getAttribute("hashValue");
             assertNotNull(correlationId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashContent.java
index 8f6f5f4..d14683c 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHashContent.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.HashContent;
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
@@ -59,9 +58,7 @@ public class TestHashContent {
         runner.assertQueueEmpty();
         runner.assertAllFlowFilesTransferred(HashContent.REL_SUCCESS, 1);
 
-        final MockFlowFile outFile = runner.
-                getFlowFilesForRelationship(HashContent.REL_SUCCESS).
-                get(0);
+        final MockFlowFile outFile = runner.getFlowFilesForRelationship(HashContent.REL_SUCCESS).get(0);
         final String hashValue = outFile.getAttribute("hash");
 
         assertEquals(expectedHash, hashValue);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestIdentifyMimeType.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestIdentifyMimeType.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestIdentifyMimeType.java
index 9f49476..0094cb0 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestIdentifyMimeType.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestIdentifyMimeType.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.IdentifyMimeType;
 import static org.junit.Assert.assertEquals;
 
 import java.io.File;
@@ -36,8 +35,7 @@ public class TestIdentifyMimeType {
 
     @Test
     public void testFiles() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new IdentifyMimeType());
+        final TestRunner runner = TestRunners.newTestRunner(new IdentifyMimeType());
 
         final File dir = new File("src/test/resources/TestIdentifyMimeType");
         final File[] files = dir.listFiles();
@@ -54,8 +52,7 @@ public class TestIdentifyMimeType {
         runner.setThreadCount(1);
         runner.run(fileCount);
 
-        runner.
-                assertAllFlowFilesTransferred(IdentifyMimeType.REL_SUCCESS, fileCount);
+        runner.assertAllFlowFilesTransferred(IdentifyMimeType.REL_SUCCESS, fileCount);
 
         final Map<String, String> expectedMimeTypes = new HashMap<>();
         expectedMimeTypes.put("1.7z", "application/x-7z-compressed");
@@ -93,13 +90,10 @@ public class TestIdentifyMimeType {
         expectedExtensions.put("flowfilev3", "");
         expectedExtensions.put("flowfilev1.tar", "");
 
-        final List<MockFlowFile> filesOut = runner.
-                getFlowFilesForRelationship(IdentifyMimeType.REL_SUCCESS);
+        final List<MockFlowFile> filesOut = runner.getFlowFilesForRelationship(IdentifyMimeType.REL_SUCCESS);
         for (final MockFlowFile file : filesOut) {
-            final String filename = file.getAttribute(CoreAttributes.FILENAME.
-                    key());
-            final String mimeType = file.getAttribute(CoreAttributes.MIME_TYPE.
-                    key());
+            final String filename = file.getAttribute(CoreAttributes.FILENAME.key());
+            final String mimeType = file.getAttribute(CoreAttributes.MIME_TYPE.key());
             final String expected = expectedMimeTypes.get(filename);
 
             final String extension = file.getAttribute("mime.extension");

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestInvokeHTTP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestInvokeHTTP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestInvokeHTTP.java
index 03fd14b..2f8dea9 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestInvokeHTTP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestInvokeHTTP.java
@@ -145,9 +145,7 @@ public class TestInvokeHTTP {
 
         //expected in request status.code and status.message
         //original flow file (+attributes)??????????
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_SUCCESS_REQ).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_SUCCESS_REQ).get(0);
         bundle.assertAttributeEquals(Config.STATUS_CODE, "200");
         bundle.assertAttributeEquals(Config.STATUS_MESSAGE, "OK");
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
@@ -159,15 +157,12 @@ public class TestInvokeHTTP {
         //status code, status message, all headers from server response --> ff attributes
         //server response message body into payload of ff
         //should not contain any original ff attributes
-        final MockFlowFile bundle1 = runner.
-                getFlowFilesForRelationship(Config.REL_SUCCESS_RESP).
-                get(0);
+        final MockFlowFile bundle1 = runner.getFlowFilesForRelationship(Config.REL_SUCCESS_RESP).get(0);
         bundle1.assertContentEquals("/status/200".getBytes("UTF-8"));
         bundle1.assertAttributeEquals(Config.STATUS_CODE, "200");
         bundle1.assertAttributeEquals(Config.STATUS_MESSAGE, "OK");
         bundle1.assertAttributeEquals("Foo", "Bar");
-        bundle1.
-                assertAttributeEquals("Content-Type", "text/plain; charset=ISO-8859-1");
+        bundle1.assertAttributeEquals("Content-Type", "text/plain; charset=ISO-8859-1");
         final String actual1 = new String(bundle1.toByteArray(), StandardCharsets.UTF_8);
         final String expected1 = "/status/200";
         Assert.assertEquals(expected1, actual1);
@@ -190,9 +185,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_FAILURE, 0);
 
         //expected in response
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_RETRY).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_RETRY).get(0);
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
         bundle.assertAttributeEquals(Config.STATUS_CODE, "500");
         bundle.assertAttributeEquals(Config.STATUS_MESSAGE, "Server Error");
@@ -220,9 +213,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_FAILURE, 0);
         //getMyFlowFiles();
         //expected in response
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_NO_RETRY).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_NO_RETRY).get(0);
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
 
         bundle.assertAttributeEquals(Config.STATUS_CODE, "302");
@@ -249,9 +240,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_FAILURE, 0);
         //getMyFlowFiles();
         //expected in response
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_NO_RETRY).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_NO_RETRY).get(0);
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
 
         bundle.assertAttributeEquals(Config.STATUS_CODE, "304");
@@ -278,9 +267,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_FAILURE, 0);
         //getMyFlowFiles();
         //expected in response
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_NO_RETRY).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_NO_RETRY).get(0);
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
 
         bundle.assertAttributeEquals(Config.STATUS_CODE, "400");
@@ -309,14 +296,11 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_FAILURE, 0);
 
         //expected in response
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_NO_RETRY).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_NO_RETRY).get(0);
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
 
         bundle.assertAttributeEquals(Config.STATUS_CODE, "412");
-        bundle.
-                assertAttributeEquals(Config.STATUS_MESSAGE, "Precondition Failed");
+        bundle.assertAttributeEquals(Config.STATUS_MESSAGE, "Precondition Failed");
         bundle.assertAttributeEquals(Config.RESPONSE_BODY, "/status/412");
         final String expected = "Hello";
         Assert.assertEquals(expected, actual);
@@ -340,9 +324,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_NO_RETRY, 0);
         runner.assertTransferCount(Config.REL_FAILURE, 0);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_SUCCESS_REQ).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_SUCCESS_REQ).get(0);
         bundle.assertAttributeEquals(Config.STATUS_CODE, "200");
         bundle.assertAttributeEquals(Config.STATUS_MESSAGE, "OK");
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
@@ -350,9 +332,7 @@ public class TestInvokeHTTP {
         Assert.assertEquals(expected, actual);
         bundle.assertAttributeEquals("Foo", "Bar");
 
-        final MockFlowFile bundle1 = runner.
-                getFlowFilesForRelationship(Config.REL_SUCCESS_RESP).
-                get(0);
+        final MockFlowFile bundle1 = runner.getFlowFilesForRelationship(Config.REL_SUCCESS_RESP).get(0);
         bundle1.assertContentEquals("".getBytes("UTF-8"));
         bundle1.assertAttributeEquals(Config.STATUS_CODE, "200");
         bundle1.assertAttributeEquals(Config.STATUS_MESSAGE, "OK");
@@ -379,9 +359,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_NO_RETRY, 0);
         runner.assertTransferCount(Config.REL_FAILURE, 0);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_SUCCESS_REQ).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_SUCCESS_REQ).get(0);
         bundle.assertAttributeEquals(Config.STATUS_CODE, "200");
         bundle.assertAttributeEquals(Config.STATUS_MESSAGE, "OK");
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
@@ -389,9 +367,7 @@ public class TestInvokeHTTP {
         Assert.assertEquals(expected, actual);
         bundle.assertAttributeEquals("Foo", "Bar");
 
-        final MockFlowFile bundle1 = runner.
-                getFlowFilesForRelationship(Config.REL_SUCCESS_RESP).
-                get(0);
+        final MockFlowFile bundle1 = runner.getFlowFilesForRelationship(Config.REL_SUCCESS_RESP).get(0);
         bundle1.assertContentEquals("".getBytes("UTF-8"));
         bundle1.assertAttributeEquals(Config.STATUS_CODE, "200");
         bundle1.assertAttributeEquals(Config.STATUS_MESSAGE, "OK");
@@ -419,9 +395,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_NO_RETRY, 0);
         runner.assertTransferCount(Config.REL_FAILURE, 0);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_SUCCESS_REQ).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_SUCCESS_REQ).get(0);
         bundle.assertAttributeEquals(Config.STATUS_CODE, "200");
         bundle.assertAttributeEquals(Config.STATUS_MESSAGE, "OK");
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
@@ -429,9 +403,7 @@ public class TestInvokeHTTP {
         Assert.assertEquals(expected, actual);
         bundle.assertAttributeEquals("Foo", "Bar");
 
-        final MockFlowFile bundle1 = runner.
-                getFlowFilesForRelationship(Config.REL_SUCCESS_RESP).
-                get(0);
+        final MockFlowFile bundle1 = runner.getFlowFilesForRelationship(Config.REL_SUCCESS_RESP).get(0);
         bundle1.assertContentEquals("".getBytes("UTF-8"));
         bundle1.assertAttributeEquals(Config.STATUS_CODE, "200");
         bundle1.assertAttributeEquals(Config.STATUS_MESSAGE, "OK");
@@ -460,9 +432,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_NO_RETRY, 0);
         runner.assertTransferCount(Config.REL_FAILURE, 1);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_FAILURE).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_FAILURE).get(0);
 
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
         final String expected = "Hello";
@@ -470,54 +440,6 @@ public class TestInvokeHTTP {
         bundle.assertAttributeEquals("Foo", "Bar");
     }
 
-    //  @Test
-    //  public void testGetFlowfileAttributes() throws IOException {
-    //      Map<String, List<String>> input = new HashMap<>();
-    //      input.put("A", Arrays.asList("1"));
-    //      input.put("B", Arrays.asList("1", "2", "3"));
-    //      input.put("C", new ArrayList<String>());
-    //      input.put("D", null);
-    //
-    //      Map<String, String> expected = new HashMap<>();
-    //      expected.put(Config.STATUS_CODE, "200");
-    //      expected.put(Config.STATUS_MESSAGE, "OK");
-    //      expected.put(Config.STATUS_LINE, "HTTP/1.1 200 OK");
-    //      expected.put("A", "1");
-    //      expected.put("B", "1, 2, 3");
-    //
-    //      URL url = new URL("file:/dev/null");
-    //      HttpURLConnection conn = new MockHttpURLConnection(url, 200, "OK", input);
-    //
-    //      Map<String, String> actual = processor.getAttributesFromHeaders(conn);
-    //
-    //      assertEquals(expected, actual);
-    //  }
-    //  @Test
-    //  public void testCsv() {
-    //      // null input should return an empty string
-    //      assertEquals("", processor.csv(null));
-    //
-    //      // empty collection returns empty string
-    //      assertEquals("", processor.csv(new ArrayList<String>()));
-    //
-    //      // pretty normal checks
-    //      assertEquals("1", processor.csv(Arrays.asList("1")));
-    //      assertEquals("1, 2", processor.csv(Arrays.asList("1", "2")));
-    //      assertEquals("1, 2, 3", processor.csv(Arrays.asList("1", "2", "3")));
-    //
-    //      // values should be trimmed
-    //      assertEquals("1, 2, 3", processor.csv(Arrays.asList("    1", "    2       ", "3       ")));
-    //
-    //      // empty values should be skipped
-    //      assertEquals("1, 3", processor.csv(Arrays.asList("1", "", "3")));
-    //
-    //      // whitespace values should be skipped
-    //      assertEquals("1, 3", processor.csv(Arrays.asList("1", "      ", "3")));
-    //
-    //      // this (mis)behavior is currently expected, embedded comma delimiters are not escaped
-    //      // note the embedded unescaped comma in the "1, " value
-    //      assertEquals("1,, 2, 3", processor.csv(Arrays.asList("1, ", "2", "3")));
-    //  }
     @Test
     public void testConnectFailBadHost() throws Exception {
         addHandler(new GetOrHeadHandler());
@@ -534,9 +456,7 @@ public class TestInvokeHTTP {
         runner.assertTransferCount(Config.REL_NO_RETRY, 0);
         runner.assertTransferCount(Config.REL_FAILURE, 1);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(Config.REL_FAILURE).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(Config.REL_FAILURE).get(0);
 
         final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
         final String expected = "Hello";
@@ -545,16 +465,12 @@ public class TestInvokeHTTP {
     }
 
     private static Map<String, String> createSslProperties() {
-        Map<String, String> map = new HashMap<String, String>();
-        map.
-                put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
-        map.
-                put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
+        Map<String, String> map = new HashMap<>();
+        map.put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
+        map.put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
         map.put(StandardSSLContextService.KEYSTORE_TYPE.getName(), "JKS");
-        map.
-                put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
-        map.
-                put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
+        map.put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
+        map.put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
         map.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
         return map;
     }
@@ -582,8 +498,7 @@ public class TestInvokeHTTP {
 
             assertEquals("/post", target);
 
-            String body = request.getReader().
-                    readLine();
+            String body = request.getReader().readLine();
             assertEquals("Hello", body);
 
         }
@@ -595,8 +510,7 @@ public class TestInvokeHTTP {
         public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
             baseRequest.setHandled(true);
 
-            int status = Integer.valueOf(target.
-                    substring("/status".length() + 1));
+            int status = Integer.valueOf(target.substring("/status".length() + 1));
             response.setStatus(status);
 
             response.setContentType("text/plain");
@@ -625,8 +539,7 @@ public class TestInvokeHTTP {
 
             response.setStatus(200);
             response.setContentType("text/plain");
-            response.getWriter().
-                    println("Way to go!");
+            response.getWriter().println("Way to go!");
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestJmsConsumer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestJmsConsumer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestJmsConsumer.java
index 8511b50..274333e 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestJmsConsumer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestJmsConsumer.java
@@ -56,8 +56,7 @@ public class TestJmsConsumer {
     }
 
     /**
-     * Test method for
-     * {@link org.apache.nifi.processors.standard.JmsConsumer#createMapMessageAttrs(javax.jms.MapMessage)}.
+     * Test method for {@link org.apache.nifi.processors.standard.JmsConsumer#createMapMessageAttrs(javax.jms.MapMessage)}.
      *
      * @throws JMSException jms
      */
@@ -66,17 +65,12 @@ public class TestJmsConsumer {
 
         MapMessage mapMessage = createMapMessage();
 
-        Map<String, String> mapMessageValues = JmsConsumer.
-                createMapMessageValues(mapMessage);
+        Map<String, String> mapMessageValues = JmsConsumer.createMapMessageValues(mapMessage);
         assertEquals("", 4, mapMessageValues.size());
-        assertEquals("", "Arnold", mapMessageValues.
-                get(JmsConsumer.MAP_MESSAGE_PREFIX + "name"));
-        assertEquals("", "97", mapMessageValues.
-                get(JmsConsumer.MAP_MESSAGE_PREFIX + "age"));
-        assertEquals("", "89686.564", mapMessageValues.
-                get(JmsConsumer.MAP_MESSAGE_PREFIX + "xyz"));
-        assertEquals("", "true", mapMessageValues.
-                get(JmsConsumer.MAP_MESSAGE_PREFIX + "good"));
+        assertEquals("", "Arnold", mapMessageValues.get(JmsConsumer.MAP_MESSAGE_PREFIX + "name"));
+        assertEquals("", "97", mapMessageValues.get(JmsConsumer.MAP_MESSAGE_PREFIX + "age"));
+        assertEquals("", "89686.564", mapMessageValues.get(JmsConsumer.MAP_MESSAGE_PREFIX + "xyz"));
+        assertEquals("", "true", mapMessageValues.get(JmsConsumer.MAP_MESSAGE_PREFIX + "good"));
     }
 
     /**
@@ -91,28 +85,18 @@ public class TestJmsConsumer {
         MapMessage mapMessage = createMapMessage();
 
         ProcessContext context = runner.getProcessContext();
-        ProcessSession session = runner.getProcessSessionFactory().
-                createSession();
-        ProcessorInitializationContext pic = new MockProcessorInitializationContext(runner.
-                getProcessor(),
-                (MockProcessContext) runner.getProcessContext());
-
-        JmsProcessingSummary summary = JmsConsumer.
-                map2FlowFile(context, session, mapMessage, true, pic.getLogger());
-
-        assertEquals("MapMessage should not create FlowFile content", 0, summary.
-                getBytesReceived());
-
-        Map<String, String> attributes = summary.getLastFlowFile().
-                getAttributes();
-        assertEquals("", "Arnold", attributes.
-                get(JmsConsumer.MAP_MESSAGE_PREFIX + "name"));
-        assertEquals("", "97", attributes.
-                get(JmsConsumer.MAP_MESSAGE_PREFIX + "age"));
-        assertEquals("", "89686.564", attributes.
-                get(JmsConsumer.MAP_MESSAGE_PREFIX + "xyz"));
-        assertEquals("", "true", attributes.
-                get(JmsConsumer.MAP_MESSAGE_PREFIX + "good"));
+        ProcessSession session = runner.getProcessSessionFactory().createSession();
+        ProcessorInitializationContext pic = new MockProcessorInitializationContext(runner.getProcessor(), (MockProcessContext) runner.getProcessContext());
+
+        JmsProcessingSummary summary = JmsConsumer.map2FlowFile(context, session, mapMessage, true, pic.getLogger());
+
+        assertEquals("MapMessage should not create FlowFile content", 0, summary.getBytesReceived());
+
+        Map<String, String> attributes = summary.getLastFlowFile().getAttributes();
+        assertEquals("", "Arnold", attributes.get(JmsConsumer.MAP_MESSAGE_PREFIX + "name"));
+        assertEquals("", "97", attributes.get(JmsConsumer.MAP_MESSAGE_PREFIX + "age"));
+        assertEquals("", "89686.564", attributes.get(JmsConsumer.MAP_MESSAGE_PREFIX + "xyz"));
+        assertEquals("", "true", attributes.get(JmsConsumer.MAP_MESSAGE_PREFIX + "good"));
     }
 
     @Test
@@ -125,19 +109,12 @@ public class TestJmsConsumer {
         textMessage.setText(payload);
 
         ProcessContext context = runner.getProcessContext();
-        ProcessSession session = runner.getProcessSessionFactory().
-                createSession();
-        ProcessorInitializationContext pic = new MockProcessorInitializationContext(runner.
-                getProcessor(),
-                (MockProcessContext) runner.getProcessContext());
+        ProcessSession session = runner.getProcessSessionFactory().createSession();
+        ProcessorInitializationContext pic = new MockProcessorInitializationContext(runner.getProcessor(), (MockProcessContext) runner.getProcessContext());
 
-        JmsProcessingSummary summary = JmsConsumer.
-                map2FlowFile(context, session, textMessage, true, pic.
-                        getLogger());
+        JmsProcessingSummary summary = JmsConsumer.map2FlowFile(context, session, textMessage, true, pic.getLogger());
 
-        assertEquals("TextMessage content length should equal to FlowFile content size", payload.
-                length(), summary.getLastFlowFile().
-                getSize());
+        assertEquals("TextMessage content length should equal to FlowFile content size", payload.length(), summary.getLastFlowFile().getSize());
 
         final byte[] buffer = new byte[payload.length()];
         runner.clearTransferState();
@@ -155,6 +132,8 @@ public class TestJmsConsumer {
 
     /**
      * Test BytesMessage to FlowFile conversion
+     *
+     * @throws java.lang.Exception ex
      */
     @Test
     public void testMap2FlowFileBytesMessage() throws Exception {
@@ -168,19 +147,12 @@ public class TestJmsConsumer {
         bytesMessage.reset();
 
         ProcessContext context = runner.getProcessContext();
-        ProcessSession session = runner.getProcessSessionFactory().
-                createSession();
-        ProcessorInitializationContext pic = new MockProcessorInitializationContext(runner.
-                getProcessor(),
-                (MockProcessContext) runner.getProcessContext());
-
-        JmsProcessingSummary summary = JmsConsumer.
-                map2FlowFile(context, session, bytesMessage, true, pic.
-                        getLogger());
-
-        assertEquals("BytesMessage content length should equal to FlowFile content size", payload.length, summary.
-                getLastFlowFile().
-                getSize());
+        ProcessSession session = runner.getProcessSessionFactory().createSession();
+        ProcessorInitializationContext pic = new MockProcessorInitializationContext(runner.getProcessor(), (MockProcessContext) runner.getProcessContext());
+
+        JmsProcessingSummary summary = JmsConsumer.map2FlowFile(context, session, bytesMessage, true, pic.getLogger());
+
+        assertEquals("BytesMessage content length should equal to FlowFile content size", payload.length, summary.getLastFlowFile().getSize());
 
         final byte[] buffer = new byte[payload.length];
         runner.clearTransferState();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestListenUDP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestListenUDP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestListenUDP.java
index d4d5524..864d7a7 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestListenUDP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestListenUDP.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.ListenUDP;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
@@ -51,10 +50,8 @@ public class TestListenUDP {
         System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
         System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
         System.setProperty("org.slf4j.simpleLogger.log.nifi.io.nio", "debug");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.ListenUDP", "debug");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.TestListenUDP", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.ListenUDP", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.TestListenUDP", "debug");
         LOGGER = LoggerFactory.getLogger(TestListenUDP.class);
     }
 
@@ -88,8 +85,7 @@ public class TestListenUDP {
 
         ProcessContext context = runner.getProcessContext();
         ListenUDP processor = (ListenUDP) runner.getProcessor();
-        ProcessSessionFactory processSessionFactory = runner.
-                getProcessSessionFactory();
+        ProcessSessionFactory processSessionFactory = runner.getProcessSessionFactory();
         processor.initializeChannelListenerAndConsumerProcessing(context);
         udpSender.start();
         boolean transferred = false;
@@ -97,18 +93,14 @@ public class TestListenUDP {
         while (!transferred && System.currentTimeMillis() < timeOut) {
             Thread.sleep(200);
             processor.onTrigger(context, processSessionFactory);
-            transferred = runner.
-                    getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).
-                    size() > 0;
+            transferred = runner.getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).size() > 0;
         }
         assertTrue("Didn't process the datagrams", transferred);
         Thread.sleep(7000);
         processor.stopping();
         processor.stopped();
         socket.close();
-        assertTrue(runner.
-                getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).
-                size() >= 60);
+        assertTrue(runner.getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).size() >= 60);
     }
 
     @Test
@@ -129,8 +121,7 @@ public class TestListenUDP {
 
         ProcessContext context = runner.getProcessContext();
         ListenUDP processor = (ListenUDP) runner.getProcessor();
-        ProcessSessionFactory processSessionFactory = runner.
-                getProcessSessionFactory();
+        ProcessSessionFactory processSessionFactory = runner.getProcessSessionFactory();
         processor.initializeChannelListenerAndConsumerProcessing(context);
         udpSender.start();
         boolean transferred = false;
@@ -138,18 +129,14 @@ public class TestListenUDP {
         while (!transferred && System.currentTimeMillis() < timeOut) {
             Thread.sleep(1000);
             processor.onTrigger(context, processSessionFactory);
-            transferred = runner.
-                    getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).
-                    size() > 0;
+            transferred = runner.getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).size() > 0;
         }
         assertTrue("Didn't process the datagrams", transferred);
         Thread.sleep(7000);
         processor.stopping();
         processor.stopped();
         socket.close();
-        assertTrue(runner.
-                getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).
-                size() >= 2);
+        assertTrue(runner.getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).size() >= 2);
     }
 
     @Test
@@ -166,8 +153,7 @@ public class TestListenUDP {
 
         ProcessContext context = runner.getProcessContext();
         ListenUDP processor = (ListenUDP) runner.getProcessor();
-        ProcessSessionFactory processSessionFactory = runner.
-                getProcessSessionFactory();
+        ProcessSessionFactory processSessionFactory = runner.getProcessSessionFactory();
         processor.initializeChannelListenerAndConsumerProcessing(context);
         udpSender.start();
         int numTransfered = 0;
@@ -175,9 +161,7 @@ public class TestListenUDP {
         while (numTransfered <= 80 && System.currentTimeMillis() < timeout) {
             Thread.sleep(200);
             processor.onTrigger(context, processSessionFactory);
-            numTransfered = runner.
-                    getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).
-                    size();
+            numTransfered = runner.getFlowFilesForRelationship(ListenUDP.RELATIONSHIP_SUCCESS).size();
         }
         assertFalse("Did not process all the datagrams", numTransfered < 80);
         processor.stopping();
@@ -215,8 +199,7 @@ public class TestListenUDP {
                 }
                 final long endTime = System.nanoTime();
                 final long durationMillis = (endTime - startTime) / 1000000;
-                LOGGER.
-                        info("Sent all UDP packets without any obvious errors | duration ms= " + durationMillis);
+                LOGGER.info("Sent all UDP packets without any obvious errors | duration ms= " + durationMillis);
             } catch (IOException e) {
                 LOGGER.error("", e);
             } finally {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java
index 48ed8c6..a657453 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java
@@ -47,16 +47,14 @@ public class TestMergeContent {
 
     @BeforeClass
     public static void setup() {
-        System.
-                setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.standard", "DEBUG");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.standard", "DEBUG");
     }
 
     @Test
     public void testSimpleBinaryConcat() throws IOException, InterruptedException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
-        runner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
+        runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
 
         createFlowFiles(runner);
         runner.run();
@@ -66,20 +64,16 @@ public class TestMergeContent {
         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
         bundle.assertContentEquals("Hello, World!".getBytes("UTF-8"));
-        bundle.
-                assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/plain-text");
+        bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/plain-text");
     }
 
     @Test
     public void testMimeTypeIsOctetStreamIfConflictingWithBinaryConcat() throws IOException, InterruptedException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
-        runner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
+        runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
 
         createFlowFiles(runner);
 
@@ -93,12 +87,9 @@ public class TestMergeContent {
         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_ORIGINAL, 4);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
         bundle.assertContentEquals("Hello, World!".getBytes("UTF-8"));
-        bundle.
-                assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/octet-stream");
+        bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/octet-stream");
     }
 
     @Test
@@ -108,10 +99,8 @@ public class TestMergeContent {
         runner.setProperty(MergeContent.MAX_BIN_COUNT, "50");
         runner.setProperty(MergeContent.MIN_ENTRIES, "10");
         runner.setProperty(MergeContent.MAX_ENTRIES, "10");
-        runner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
-        runner.
-                setProperty(MergeContent.CORRELATION_ATTRIBUTE_NAME, "correlationId");
+        runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
+        runner.setProperty(MergeContent.CORRELATION_ATTRIBUTE_NAME, "correlationId");
 
         final Map<String, String> attrs = new HashMap<>();
         for (int i = 0; i < 49; i++) {
@@ -143,8 +132,7 @@ public class TestMergeContent {
     @Test
     public void testSimpleBinaryConcatWaitsForMin() throws IOException, InterruptedException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
+        runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
         runner.setProperty(MergeContent.MIN_SIZE, "20 KB");
 
         createFlowFiles(runner);
@@ -159,8 +147,7 @@ public class TestMergeContent {
     public void testZip() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
-        runner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_ZIP);
+        runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_ZIP);
 
         createFlowFiles(runner);
         runner.run();
@@ -170,12 +157,8 @@ public class TestMergeContent {
         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
-        try (final InputStream rawIn = new ByteArrayInputStream(runner.
-                getContentAsByteArray(bundle));
-                final ZipInputStream in = new ZipInputStream(rawIn)) {
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        try (final InputStream rawIn = new ByteArrayInputStream(runner.getContentAsByteArray(bundle)); final ZipInputStream in = new ZipInputStream(rawIn)) {
             Assert.assertNotNull(in.getNextEntry());
             final byte[] part1 = IOUtils.toByteArray(in);
             Assert.assertTrue(Arrays.equals("Hello".getBytes("UTF-8"), part1));
@@ -188,16 +171,14 @@ public class TestMergeContent {
             final byte[] part3 = IOUtils.toByteArray(in);
             Assert.assertTrue(Arrays.equals("World!".getBytes("UTF-8"), part3));
         }
-        bundle.
-                assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/zip");
+        bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/zip");
     }
 
     @Test
     public void testTar() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
-        runner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_TAR);
+        runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_TAR);
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put(CoreAttributes.MIME_TYPE.key(), "application/plain-text");
@@ -206,9 +187,7 @@ public class TestMergeContent {
         runner.enqueue("Hello".getBytes("UTF-8"), attributes);
         attributes.put(CoreAttributes.FILENAME.key(), "ALongerrrFileName");
         runner.enqueue(", ".getBytes("UTF-8"), attributes);
-        attributes
-                .put(CoreAttributes.FILENAME.key(),
-                        "AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName");
+        attributes.put(CoreAttributes.FILENAME.key(), "AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName");
         runner.enqueue("World!".getBytes("UTF-8"), attributes);
         runner.run();
 
@@ -217,12 +196,8 @@ public class TestMergeContent {
         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
-        try (final InputStream rawIn = new ByteArrayInputStream(runner.
-                getContentAsByteArray(bundle));
-                final TarArchiveInputStream in = new TarArchiveInputStream(rawIn)) {
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        try (final InputStream rawIn = new ByteArrayInputStream(runner.getContentAsByteArray(bundle)); final TarArchiveInputStream in = new TarArchiveInputStream(rawIn)) {
             ArchiveEntry entry = in.getNextEntry();
             Assert.assertNotNull(entry);
             assertEquals("AShortFileName", entry.getName());
@@ -235,13 +210,11 @@ public class TestMergeContent {
             Assert.assertTrue(Arrays.equals(", ".getBytes("UTF-8"), part2));
 
             entry = in.getNextEntry();
-            assertEquals("AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName", entry.
-                    getName());
+            assertEquals("AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName", entry.getName());
             final byte[] part3 = IOUtils.toByteArray(in);
             Assert.assertTrue(Arrays.equals("World!".getBytes("UTF-8"), part3));
         }
-        bundle.
-                assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/tar");
+        bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/tar");
     }
 
     @Test
@@ -250,33 +223,26 @@ public class TestMergeContent {
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
         runner.setProperty(MergeContent.MIN_ENTRIES, "2");
         runner.setProperty(MergeContent.MAX_ENTRIES, "2");
-        runner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_FLOWFILE_STREAM_V3);
+        runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_FLOWFILE_STREAM_V3);
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("path", "folder");
-        runner.enqueue(Paths.
-                get("src/test/resources/TestUnpackContent/folder/cal.txt"), attributes);
-        runner.enqueue(Paths.
-                get("src/test/resources/TestUnpackContent/folder/date.txt"), attributes);
+        runner.enqueue(Paths.get("src/test/resources/TestUnpackContent/folder/cal.txt"), attributes);
+        runner.enqueue(Paths.get("src/test/resources/TestUnpackContent/folder/date.txt"), attributes);
         runner.run();
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);
         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_ORIGINAL, 2);
 
-        final MockFlowFile merged = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
-        merged.
-                assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/flowfile-v3");
+        final MockFlowFile merged = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        merged.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/flowfile-v3");
     }
 
     @Test
     public void testDefragment() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 min");
 
         final Map<String, String> attributes = new HashMap<>();
@@ -295,18 +261,14 @@ public class TestMergeContent {
         runner.run();
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);
-        final MockFlowFile assembled = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".
-                getBytes("UTF-8"));
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
     }
 
     @Test
     public void testDefragmentWithTooFewFragments() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "2 secs");
 
         final Map<String, String> attributes = new HashMap<>();
@@ -339,8 +301,7 @@ public class TestMergeContent {
     @Test
     public void testDefragmentOutOfOrder() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 min");
 
         final Map<String, String> attributes = new HashMap<>();
@@ -359,19 +320,15 @@ public class TestMergeContent {
         runner.run();
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);
-        final MockFlowFile assembled = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".
-                getBytes("UTF-8"));
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
     }
 
     @Ignore("this test appears to be faulty")
     @Test
     public void testDefragmentMultipleMingledSegments() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 min");
 
         final Map<String, String> attributes = new HashMap<>();
@@ -400,22 +357,16 @@ public class TestMergeContent {
         runner.run(2);
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 2);
-        final MockFlowFile assembled = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".
-                getBytes("UTF-8"));
-        final MockFlowFile assembledTwo = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(1);
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
+        final MockFlowFile assembledTwo = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(1);
         assembledTwo.assertContentEquals("No x in Nixon".getBytes("UTF-8"));
     }
 
     @Test
     public void testDefragmentOldStyleAttributes() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 min");
 
         final Map<String, String> attributes = new HashMap<>();
@@ -435,20 +386,15 @@ public class TestMergeContent {
         runner.run();
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);
-        final MockFlowFile assembled = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".
-                getBytes("UTF-8"));
-        assembled.
-                assertAttributeEquals(CoreAttributes.FILENAME.key(), "originalfilename");
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
+        assembled.assertAttributeEquals(CoreAttributes.FILENAME.key(), "originalfilename");
     }
 
     @Test
     public void testDefragmentMultipleOnTriggers() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put(MergeContent.FRAGMENT_ID_ATTRIBUTE, "1");
@@ -470,19 +416,15 @@ public class TestMergeContent {
         runner.run();
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);
-        final MockFlowFile assembled = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".
-                getBytes("UTF-8"));
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
     }
 
     @Ignore("This test appears to be a fail...is retuning 1 instead of 2...needs work")
     @Test
     public void testMergeBasedOnCorrelation() throws IOException, InterruptedException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_BIN_PACK);
+        runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_BIN_PACK);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 min");
         runner.setProperty(MergeContent.CORRELATION_ATTRIBUTE_NAME, "attr");
         runner.setProperty(MergeContent.MAX_ENTRIES, "3");
@@ -503,8 +445,7 @@ public class TestMergeContent {
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 2);
 
-        final List<MockFlowFile> mergedFiles = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED);
+        final List<MockFlowFile> mergedFiles = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED);
         final MockFlowFile merged1 = mergedFiles.get(0);
         final MockFlowFile merged2 = mergedFiles.get(1);
 
@@ -526,8 +467,7 @@ public class TestMergeContent {
     @Test
     public void testMaxBinAge() throws InterruptedException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_BIN_PACK);
+        runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_BIN_PACK);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "2 sec");
         runner.setProperty(MergeContent.CORRELATION_ATTRIBUTE_NAME, "attr");
         runner.setProperty(MergeContent.MAX_ENTRIES, "500");
@@ -552,8 +492,7 @@ public class TestMergeContent {
     @Test
     public void testUniqueAttributes() {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.ATTRIBUTE_STRATEGY, MergeContent.ATTRIBUTE_STRATEGY_ALL_UNIQUE);
+        runner.setProperty(MergeContent.ATTRIBUTE_STRATEGY, MergeContent.ATTRIBUTE_STRATEGY_ALL_UNIQUE);
         runner.setProperty(MergeContent.MAX_SIZE, "2 B");
         runner.setProperty(MergeContent.MIN_SIZE, "2 B");
 
@@ -572,9 +511,7 @@ public class TestMergeContent {
         runner.run();
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);
-        final MockFlowFile outFile = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
+        final MockFlowFile outFile = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
 
         outFile.assertAttributeEquals("abc", "xyz");
         outFile.assertAttributeEquals("hello", "good-bye");
@@ -585,8 +522,7 @@ public class TestMergeContent {
     @Test
     public void testCommonAttributesOnly() {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
-        runner.
-                setProperty(MergeContent.ATTRIBUTE_STRATEGY, MergeContent.ATTRIBUTE_STRATEGY_ALL_COMMON);
+        runner.setProperty(MergeContent.ATTRIBUTE_STRATEGY, MergeContent.ATTRIBUTE_STRATEGY_ALL_COMMON);
         runner.setProperty(MergeContent.MAX_SIZE, "2 B");
         runner.setProperty(MergeContent.MIN_SIZE, "2 B");
 
@@ -605,9 +541,7 @@ public class TestMergeContent {
         runner.run();
 
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);
-        final MockFlowFile outFile = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
+        final MockFlowFile outFile = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
 
         outFile.assertAttributeEquals("abc", "xyz");
         outFile.assertAttributeNotExists("hello");
@@ -615,8 +549,7 @@ public class TestMergeContent {
         outFile.assertAttributeNotExists("xyz");
 
         final Set<String> uuids = new HashSet<>();
-        for (final MockFlowFile mff : runner.
-                getFlowFilesForRelationship(MergeContent.REL_ORIGINAL)) {
+        for (final MockFlowFile mff : runner.getFlowFilesForRelationship(MergeContent.REL_ORIGINAL)) {
             uuids.add(mff.getAttribute(CoreAttributes.UUID.key()));
         }
         uuids.add(outFile.getAttribute(CoreAttributes.UUID.key()));
@@ -628,8 +561,7 @@ public class TestMergeContent {
     public void testCountAttribute() throws IOException, InterruptedException {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
-        runner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
+        runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
 
         createFlowFiles(runner);
         runner.run();
@@ -639,9 +571,7 @@ public class TestMergeContent {
         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3);
 
-        final MockFlowFile bundle = runner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED).
-                get(0);
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
         bundle.assertContentEquals("Hello, World!".getBytes("UTF-8"));
         bundle.assertAttributeEquals(MergeContent.MERGE_COUNT_ATTRIBUTE, "3");
         bundle.assertAttributeExists(MergeContent.MERGE_BIN_AGE_ATTRIBUTE);

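For readers skimming these hunks, the reformatted assertions above all follow the same NiFi TestRunner pattern. A minimal, self-contained sketch of that pattern is shown below; it is illustrative only (not part of this commit), and the JUnit 4 and org.apache.nifi.util import locations are assumed from the surrounding test code.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.nifi.processors.standard.MergeContent;
    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class MergeContentConcatSketch {

        @Test
        public void testConcatThreeFragments() throws Exception {
            // Bin exactly three FlowFiles and concatenate them, mirroring the tests above.
            final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
            runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
            runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
            runner.setProperty(MergeContent.MIN_ENTRIES, "3");
            runner.setProperty(MergeContent.MAX_ENTRIES, "3");

            final Map<String, String> attrs = new HashMap<>();
            runner.enqueue("Hello".getBytes("UTF-8"), attrs);
            runner.enqueue(", ".getBytes("UTF-8"), attrs);
            runner.enqueue("World!".getBytes("UTF-8"), attrs);
            runner.run();

            runner.assertTransferCount(MergeContent.REL_MERGED, 1);
            runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
            runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3);

            // The bin of three FlowFiles is concatenated into a single merged FlowFile.
            final MockFlowFile merged = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
            merged.assertContentEquals("Hello, World!".getBytes("UTF-8"));
            merged.assertAttributeEquals(MergeContent.MERGE_COUNT_ATTRIBUTE, "3");
        }
    }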
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java
index 2c58b80..768a8d0 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.ModifyBytes;
 import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -41,14 +40,11 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "1 MB");
         runner.setProperty(ModifyBytes.END_OFFSET, "1 MB");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
         out.assertContentEquals("".getBytes("UTF-8"));
     }
 
@@ -58,16 +54,12 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "0 MB");
         runner.setProperty(ModifyBytes.END_OFFSET, "0 MB");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestModifyBytes/testFile.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestModifyBytes/testFile.txt")));
     }
 
     @Test
@@ -76,18 +68,14 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "12 B"); //REMOVE - '<<<HEADER>>>'
         runner.setProperty(ModifyBytes.END_OFFSET, "0 MB");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
         final String outContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
         System.out.println(outContent);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestModifyBytes/noHeader.txt")));
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestModifyBytes/noHeader.txt")));
     }
 
     @Test
@@ -96,14 +84,11 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "181 B");
         runner.setProperty(ModifyBytes.END_OFFSET, "0 B");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
         final String outContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
         System.out.println(outContent);
         out.assertContentEquals("<<<FOOTER>>>".getBytes("UTF-8"));
@@ -115,14 +100,11 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "0 B");
         runner.setProperty(ModifyBytes.END_OFFSET, "181 B");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
         out.assertContentEquals("<<<HEADER>>>".getBytes("UTF-8"));
     }
 
@@ -132,18 +114,14 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "0 B");
         runner.setProperty(ModifyBytes.END_OFFSET, "12 B");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
         final String outContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
         System.out.println(outContent);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestModifyBytes/noFooter.txt")));
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestModifyBytes/noFooter.txt")));
     }
 
     @Test
@@ -152,18 +130,14 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "12 B");
         runner.setProperty(ModifyBytes.END_OFFSET, "12 B");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
         final String outContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
         System.out.println(outContent);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestModifyBytes/noFooter_noHeader.txt")));
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestModifyBytes/noFooter_noHeader.txt")));
     }
 
     @Test
@@ -172,14 +146,11 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "97 B");
         runner.setProperty(ModifyBytes.END_OFFSET, "97 B");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
         out.assertContentEquals("".getBytes("UTF-8"));
     }
 
@@ -189,14 +160,11 @@ public class TestModifyBytes {
         runner.setProperty(ModifyBytes.START_OFFSET, "94 B");
         runner.setProperty(ModifyBytes.END_OFFSET, "96 B");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestModifyBytes/testFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestModifyBytes/testFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
         final String outContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
         System.out.println(outContent);
         out.assertContentEquals("Dew".getBytes("UTF-8"));
@@ -209,15 +177,13 @@ public class TestModifyBytes {
     private byte[] translateNewLines(final Path path) throws IOException {
         final byte[] data = Files.readAllBytes(path);
         final String text = new String(data, StandardCharsets.UTF_8);
-        return translateNewLines(text).
-                getBytes(StandardCharsets.UTF_8);
+        return translateNewLines(text).getBytes(StandardCharsets.UTF_8);
     }
 
     private String translateNewLines(final String text) {
         final String lineSeparator = System.getProperty("line.separator");
         final Pattern pattern = Pattern.compile("\n", Pattern.MULTILINE);
-        final String translated = pattern.matcher(text).
-                replaceAll(lineSeparator);
+        final String translated = pattern.matcher(text).replaceAll(lineSeparator);
         return translated;
     }
 }

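The ModifyBytes hunks above trim bytes from files on disk in the test resources. A hedged, self-contained variant using an in-memory FlowFile is sketched below; it is illustrative only (not part of this commit), and the 12-byte offset assumes a payload whose first 12 bytes are a "<<<HEADER>>>" marker, as in the test resources referenced above.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.nifi.processors.standard.ModifyBytes;
    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class ModifyBytesSketch {

        @Test
        public void testTrimHeaderBytes() throws Exception {
            final TestRunner runner = TestRunners.newTestRunner(new ModifyBytes());
            // Drop the first 12 bytes ("<<<HEADER>>>") and keep everything else.
            runner.setProperty(ModifyBytes.START_OFFSET, "12 B");
            runner.setProperty(ModifyBytes.END_OFFSET, "0 B");

            final Map<String, String> attrs = new HashMap<>();
            runner.enqueue("<<<HEADER>>>payload".getBytes("UTF-8"), attrs);
            runner.run();

            runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
            final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
            out.assertContentEquals("payload".getBytes("UTF-8"));
        }
    }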

[34/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/ReadOnlyAccessControlTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/ReadOnlyAccessControlTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/ReadOnlyAccessControlTest.java
index 6ac8b47..0ab074f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/ReadOnlyAccessControlTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/ReadOnlyAccessControlTest.java
@@ -101,7 +101,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Ensures the admin user can get a group's content.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testGroupGet() throws Exception {
@@ -133,13 +133,12 @@ public class ReadOnlyAccessControlTest {
         Assert.assertEquals(1, processGroupContentsDTO.getInputPorts().size());
         Assert.assertEquals(1, processGroupContentsDTO.getOutputPorts().size());
         Assert.assertEquals(1, processGroupContentsDTO.getLabels().size());
-//        Assert.assertEquals(1, processGroupContentsDTO.getRemoteProcessGroups().size());
     }
 
     /**
      * Verifies the admin user cannot update a group.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testGroupPut() throws Exception {
@@ -163,7 +162,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the read only user can retrieve the controller configuration.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testControllerConfigurationGet() throws Exception {
@@ -188,7 +187,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the read only user cannot update the controller configuration.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testControllerConfigurationPut() throws Exception {
@@ -212,7 +211,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the read only user cannot create a new flow archive.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testFlowConfigurationArchivePost() throws Exception {
@@ -232,7 +231,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the read only user can retrieve his credentials.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testAuthoritiesGet() throws Exception {
@@ -255,7 +254,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the read only user can retrieve the banners.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testBannersGet() throws Exception {
@@ -278,7 +277,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the read only user can retrieve the processor types.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorTypesGet() throws Exception {
@@ -300,7 +299,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the read only user can retrieve the prioritizer types.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testPrioritizerTypesGet() throws Exception {
@@ -325,7 +324,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user can get process groups.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorGroupsGet() throws Exception {
@@ -346,7 +345,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the operator user cannot create new process groups.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupPost() throws Exception {
@@ -369,10 +368,9 @@ public class ReadOnlyAccessControlTest {
     }
 
     /**
-     * Verifies that the operator user cannot update process group
-     * configuration.
+     * Verifies that the operator user cannot update process group configuration.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupPut() throws Exception {
@@ -397,7 +395,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the operator user cannot delete process groups.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupDelete() throws Exception {
@@ -416,7 +414,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user can get processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorsGet() throws Exception {
@@ -437,7 +435,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot create new processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorPost() throws Exception {
@@ -461,7 +459,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot create new processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorPut() throws Exception {
@@ -485,7 +483,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot delete processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorDelete() throws Exception {
@@ -504,7 +502,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user can get connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionsGet() throws Exception {
@@ -525,7 +523,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot create connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionPost() throws Exception {
@@ -549,7 +547,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot create connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionPut() throws Exception {
@@ -573,7 +571,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot delete connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionDelete() throws Exception {
@@ -592,7 +590,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user can get input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortsGet() throws Exception {
@@ -613,7 +611,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user cannot create input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortPost() throws Exception {
@@ -638,7 +636,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user cannot create input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortPut() throws Exception {
@@ -663,7 +661,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user cannot delete input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortDelete() throws Exception {
@@ -682,7 +680,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user can get output ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortsGet() throws Exception {
@@ -703,7 +701,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user cannot create output ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortPost() throws Exception {
@@ -728,7 +726,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user cannot create input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortPut() throws Exception {
@@ -753,7 +751,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user cannot delete output ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortDelete() throws Exception {
@@ -772,7 +770,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the admin user can get input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelsGet() throws Exception {
@@ -793,7 +791,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot create labels.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelPost() throws Exception {
@@ -817,7 +815,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot create labels.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelPut() throws Exception {
@@ -841,7 +839,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies that the read only user cannot delete labels.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelDelete() throws Exception {
@@ -854,93 +852,13 @@ public class ReadOnlyAccessControlTest {
         Assert.assertEquals(403, response.getStatus());
     }
 
-//    // ----------------------------------------------
-//    // REMOTE PROCESS GROUP
-//    // ----------------------------------------------
-//    
-//    /**
-//     * Verifies that the admin user can get input ports.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupsGet() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups";
-//        
-//        // perform the request
-//        ClientResponse response = READ_ONLY_USER.testGet(url);
-//        
-//        // get the response
-//        RemoteProcessGroupsEntity entity = response.getEntity(RemoteProcessGroupsEntity.class);
-//        
-//        // ensure the request was successful
-//        Assert.assertEquals(200, response.getStatus());
-//        Assert.assertNotNull(entity.getRemoteProcessGroups());
-//        Assert.assertEquals(1, entity.getRemoteProcessGroups().size());
-//    }
-//    
-//    /**
-//     * Verifies that the read only user cannot create new remote process groups.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupPost() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups";
-//        
-//        // create the entity body
-//        RemoteProcessGroupEntity entity = new RemoteProcessGroupEntity();
-//        entity.setRevision(NiFiTestUser.REVISION);
-//        
-//        // perform the request
-//        ClientResponse response = READ_ONLY_USER.testPost(url, entity);
-//        
-//        // ensure the request is failed with a forbidden status code
-//        Assert.assertEquals(403, response.getStatus());
-//    }
-//    
-//    /**
-//     * Verifies that the read only user cannot update a remote process group.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupPut() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups/1";
-//        
-//        // create the entity body
-//        RemoteProcessGroupEntity entity = new RemoteProcessGroupEntity();
-//        entity.setRevision(NiFiTestUser.REVISION);
-//        
-//        // perform the request
-//        ClientResponse response = READ_ONLY_USER.testPut(url, entity);
-//        
-//        // ensure the request is failed with a forbidden status code
-//        Assert.assertEquals(403, response.getStatus());
-//    }
-//    
-//    /**
-//     * Verifies that the read only user cannot delete remote process groups.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupDelete() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups/1";
-//        
-//        // perform the request
-//        ClientResponse response = READ_ONLY_USER.testDelete(url);
-//        
-//        // ensure the request is failed with a forbidden status code
-//        Assert.assertEquals(403, response.getStatus());
-//    }
     // ----------------------------------------------
     // HISTORY
     // ----------------------------------------------
     /**
      * Tests the ability to retrieve the NiFi history.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testHistoryGet() throws Exception {
@@ -960,7 +878,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Tests the ability to retrieve a specific action.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testActionGet() throws Exception {
@@ -977,7 +895,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the read only user cannot purge history.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testHistoryDelete() throws Exception {
@@ -996,7 +914,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Tests the ability to retrieve the NiFi users.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testUsersGet() throws Exception {
@@ -1012,7 +930,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Tests the ability to retrieve a specific user.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testUserGet() throws Exception {
@@ -1029,7 +947,7 @@ public class ReadOnlyAccessControlTest {
     /**
      * Verifies the admin user can update a person.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testUserPut() throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestAuthorizationProvider.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestAuthorizationProvider.java
index 48f18a9..d51b7df 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestAuthorizationProvider.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestAuthorizationProvider.java
@@ -71,7 +71,7 @@ public class NiFiTestAuthorizationProvider implements AuthorityProvider {
     /**
      * Determines if the specified dn is known to this authority provider.
      *
-     * @param dn
+     * @param dn dn
      * @return True if the dn is known, false otherwise
      */
     @Override
@@ -87,10 +87,10 @@ public class NiFiTestAuthorizationProvider implements AuthorityProvider {
     /**
      * Loads the authorities for the specified user.
      *
-     * @param dn
-     * @return
-     * @throws UnknownIdentityException
-     * @throws AuthorityAccessException
+     * @param dn dn
+     * @return authorities
+     * @throws UnknownIdentityException ex
+     * @throws AuthorityAccessException ex
      */
     @Override
     public Set<Authority> getAuthorities(String dn) throws UnknownIdentityException, AuthorityAccessException {
@@ -101,9 +101,9 @@ public class NiFiTestAuthorizationProvider implements AuthorityProvider {
     /**
      * Sets the specified authorities to the specified user.
      *
-     * @param dn
-     * @param authorities
-     * @throws AuthorityAccessException
+     * @param dn dn
+     * @param authorities authorities
+     * @throws AuthorityAccessException ex
      */
     @Override
     public void setAuthorities(String dn, Set<Authority> authorities) throws UnknownIdentityException, AuthorityAccessException {
@@ -112,10 +112,10 @@ public class NiFiTestAuthorizationProvider implements AuthorityProvider {
     /**
      * Adds the specified user.
      *
-     * @param dn
-     * @param authorities
-     * @throws UnknownIdentityException
-     * @throws AuthorityAccessException
+     * @param dn dn
+     * @param group group
+     * @throws UnknownIdentityException ex
+     * @throws AuthorityAccessException ex
      */
     @Override
     public void addUser(String dn, String group) throws AuthorityAccessException {
@@ -124,9 +124,9 @@ public class NiFiTestAuthorizationProvider implements AuthorityProvider {
     /**
      * Gets the users for the specified authority.
      *
-     * @param authority
-     * @return
-     * @throws AuthorityAccessException
+     * @param authority authority
+     * @return users
+     * @throws AuthorityAccessException ex
      */
     @Override
     public Set<String> getUsers(Authority authority) throws AuthorityAccessException {
@@ -142,9 +142,9 @@ public class NiFiTestAuthorizationProvider implements AuthorityProvider {
     /**
      * Removes the specified user.
      *
-     * @param dn
-     * @throws UnknownIdentityException
-     * @throws AuthorityAccessException
+     * @param dn dn
+     * @throws UnknownIdentityException ex
+     * @throws AuthorityAccessException ex
      */
     @Override
     public void revokeUser(String dn) throws UnknownIdentityException, AuthorityAccessException {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestServer.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestServer.java
index 24dd8be..42b0aab 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestServer.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestServer.java
@@ -130,7 +130,6 @@ public class NiFiTestServer {
         jetty.addConnector(https);
     }
 
-
     public void startServer() throws Exception {
         jetty.start();
 
@@ -169,16 +168,14 @@ public class NiFiTestServer {
     }
 
     /**
-     * Convenience method to provide access to Spring beans accessible from the
-     * web application context.
+     * Convenience method to provide access to Spring beans accessible from the web application context.
      *
      * @param <T> target cast
      * @param beanName name of the spring bean
      * @param clazz class of the spring bean
      * @return Spring bean with given name and class type
      *
-     * @throws ClassCastException if the bean found cannot be cast to the given
-     * class type
+     * @throws ClassCastException if the bean found cannot be cast to the given class type
      */
     public <T> T getSpringBean(String beanName, Class<T> clazz) {
         ServletContext servletContext = webappContext.getServletHandler().getServletContext();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestUser.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestUser.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestUser.java
index 3acef72..52f4522 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestUser.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/util/NiFiTestUser.java
@@ -32,8 +32,8 @@ public class NiFiTestUser {
 
     public static final long REVISION = 0L;
 
-    private Client client;
-    private String proxyDn;
+    private final Client client;
+    private final String proxyDn;
 
     public NiFiTestUser(Client client, String dn) {
         this.client = client;
@@ -43,10 +43,9 @@ public class NiFiTestUser {
     /**
      * Performs a GET using the specified url.
      *
-     * @param client
-     * @param url
-     * @return
-     * @throws Exception
+     * @param url url
+     * @return response
+     * @throws Exception ex
      */
     public ClientResponse testGet(String url) throws Exception {
         return testGet(url, null);
@@ -55,10 +54,9 @@ public class NiFiTestUser {
     /**
      * Performs a GET using the specified url and query parameters.
      *
-     * @param client
-     * @param url
-     * @param queryParams
-     * @return
+     * @param url url
+     * @param queryParams params
+     * @return response
      */
     public ClientResponse testGet(String url, Map<String, String> queryParams) {
         // get the resource
@@ -78,10 +76,9 @@ public class NiFiTestUser {
     /**
      * Performs a POST using the specified url.
      *
-     * @param client
-     * @param url
-     * @return
-     * @throws Exception
+     * @param url url
+     * @return response
+     * @throws Exception ex
      */
     public ClientResponse testPost(String url) throws Exception {
         return testPost(url, (Object) null);
@@ -90,11 +87,10 @@ public class NiFiTestUser {
     /**
      * Performs a POST using the specified url and entity body.
      *
-     * @param client
-     * @param url
-     * @param entity
-     * @return
-     * @throws Exception
+     * @param url url
+     * @param entity entity
+     * @return response
+     * @throws Exception ex
      */
     public ClientResponse testPost(String url, Object entity) throws Exception {
         // get the resource
@@ -112,11 +108,10 @@ public class NiFiTestUser {
     /**
      * Performs a POST using the specified url and entity body.
      *
-     * @param client
-     * @param url
-     * @param entity
-     * @return
-     * @throws Exception
+     * @param url url
+     * @param entity entity
+     * @return response
+     * @throws Exception ex
      */
     public ClientResponse testPostMultiPart(String url, Object entity) throws Exception {
         // get the resource
@@ -134,9 +129,10 @@ public class NiFiTestUser {
     /**
      * Performs a POST using the specified url and form data.
      *
-     * @param url
-     * @param formData
-     * @return
+     * @param url url
+     * @param formData form data
+     * @return response
+     * @throws java.lang.Exception ex
      */
     public ClientResponse testPost(String url, Map<String, String> formData) throws Exception {
         // convert the form data
@@ -146,7 +142,8 @@ public class NiFiTestUser {
         }
 
         // get the resource
-        WebResource.Builder resourceBuilder = client.resource(url).accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_FORM_URLENCODED).header(X509AuthenticationFilter.PROXY_ENTITIES_CHAIN, proxyDn);
+        WebResource.Builder resourceBuilder
+                = client.resource(url).accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_FORM_URLENCODED).header(X509AuthenticationFilter.PROXY_ENTITIES_CHAIN, proxyDn);
 
         // add the form data if necessary
         if (!entity.isEmpty()) {
@@ -160,9 +157,10 @@ public class NiFiTestUser {
     /**
      * Performs a PUT using the specified url and entity body.
      *
-     * @param url
-     * @param entity
-     * @return
+     * @param url url
+     * @param entity entity
+     * @return response
+     * @throws java.lang.Exception ex
      */
     public ClientResponse testPut(String url, Object entity) throws Exception {
         // get the resource
@@ -180,9 +178,10 @@ public class NiFiTestUser {
     /**
      * Performs a PUT using the specified url and form data.
      *
-     * @param url
-     * @param formData
-     * @return
+     * @param url url
+     * @param formData form data
+     * @return response
+     * @throws java.lang.Exception ex
      */
     public ClientResponse testPut(String url, Map<String, String> formData) throws Exception {
         // convert the form data
@@ -192,7 +191,8 @@ public class NiFiTestUser {
         }
 
         // get the resource
-        WebResource.Builder resourceBuilder = client.resource(url).accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_FORM_URLENCODED).header(X509AuthenticationFilter.PROXY_ENTITIES_CHAIN, proxyDn);
+        WebResource.Builder resourceBuilder
+                = client.resource(url).accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_FORM_URLENCODED).header(X509AuthenticationFilter.PROXY_ENTITIES_CHAIN, proxyDn);
 
         // add the form data if necessary
         if (!entity.isEmpty()) {
@@ -206,7 +206,9 @@ public class NiFiTestUser {
     /**
      * Performs a DELETE using the specified url.
      *
-     * @param url
+     * @param url url
+     * @return response
+     * @throws java.lang.Exception ex
      */
     public ClientResponse testDelete(String url) throws Exception {
         return testDelete(url, (Object) null);
@@ -215,9 +217,10 @@ public class NiFiTestUser {
     /**
      * Performs a DELETE using the specified url and entity.
      *
-     * @param url
-     * @param entity
-     * @return
+     * @param url url
+     * @param entity entity
+     * @return response
+     * @throws java.lang.Exception ex
      */
     public ClientResponse testDelete(String url, Object entity) throws Exception {
         // get the resource
@@ -235,9 +238,10 @@ public class NiFiTestUser {
     /**
      * Performs a DELETE using the specified url and query parameters.
      *
-     * @param url
-     * @param queryParams
-     * @return
+     * @param url url
+     * @param queryParams params
+     * @return response
+     * @throws java.lang.Exception ex
      */
     public ClientResponse testDelete(String url, Map<String, String> queryParams) throws Exception {
         // get the resource

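The Javadoc cleanup above documents the small HTTP helper API on NiFiTestUser (testGet/testPost/testPut/testDelete). As a hedged illustration of how that helper is driven, the sketch below constructs a user and issues a GET; the Jersey client setup, the DN string, and the URL are placeholders for the example, not values taken from this commit.

    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.ClientResponse;

    import org.apache.nifi.integration.util.NiFiTestUser;

    public class NiFiTestUserUsageSketch {

        public static void main(final String[] args) throws Exception {
            // In the real access-control tests the client and proxy DN come from the
            // test harness; both values below are placeholders for illustration.
            final Client client = Client.create();
            final NiFiTestUser readOnlyUser = new NiFiTestUser(client, "CN=Read Only User, OU=Example");

            // Issue a GET through the helper and inspect the HTTP status code,
            // as the access-control tests above do.
            final ClientResponse response = readOnlyUser.testGet("https://localhost:8443/nifi-api/controller/banners");
            System.out.println("status = " + response.getStatus());
        }
    }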
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-docs/src/main/java/org/apache/nifi/web/docs/DocumentationController.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-docs/src/main/java/org/apache/nifi/web/docs/DocumentationController.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-docs/src/main/java/org/apache/nifi/web/docs/DocumentationController.java
index 7d8ec6c..5f9dfdf 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-docs/src/main/java/org/apache/nifi/web/docs/DocumentationController.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-docs/src/main/java/org/apache/nifi/web/docs/DocumentationController.java
@@ -41,7 +41,7 @@ public class DocumentationController extends HttpServlet {
 
     private static final int GENERAL_LINK_COUNT = 4;
     private static final int DEVELOPER_LINK_COUNT = 2;
-    
+
     // context for accessing the extension mapping
     private ServletContext servletContext;
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/pom.xml
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/pom.xml b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/pom.xml
index bb90ab5..d3af86e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/pom.xml
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/pom.xml
@@ -216,23 +216,6 @@
     </build>
     <profiles>
         <profile>
-            <id>dev</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.apache.rat</groupId>
-                        <artifactId>apache-rat-plugin</artifactId>
-                        <configuration>
-                            <skip>true</skip>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-        <profile>
             <id>minify-and-compress</id>
             <activation>
                 <activeByDefault>true</activeByDefault>


[17/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/resources/docs/org.apache.nifi.distributed.cache.server.map.DistributedMapCacheServer/additionalDetails.html
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/resources/docs/org.apache.nifi.distributed.cache.server.map.DistributedMapCacheServer/additionalDetails.html b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/resources/docs/org.apache.nifi.distributed.cache.server.map.DistributedMapCacheServer/additionalDetails.html
index ad98221..740abec 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/resources/docs/org.apache.nifi.distributed.cache.server.map.DistributedMapCacheServer/additionalDetails.html
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/resources/docs/org.apache.nifi.distributed.cache.server.map.DistributedMapCacheServer/additionalDetails.html
@@ -1,36 +1,36 @@
 <!DOCTYPE html>
 <html lang="en">
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<head>
-<meta charset="utf-8" />
-<title>Distributed Map Cache Client Service</title>
-<link rel="stylesheet" href="../../css/component-usage.css" type="text/css" />
-</head>
+    <!--
+      Licensed to the Apache Software Foundation (ASF) under one or more
+      contributor license agreements.  See the NOTICE file distributed with
+      this work for additional information regarding copyright ownership.
+      The ASF licenses this file to You under the Apache License, Version 2.0
+      (the "License"); you may not use this file except in compliance with
+      the License.  You may obtain a copy of the License at
+          http://www.apache.org/licenses/LICENSE-2.0
+      Unless required by applicable law or agreed to in writing, software
+      distributed under the License is distributed on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+      See the License for the specific language governing permissions and
+      limitations under the License.
+    -->
+    <head>
+        <meta charset="utf-8" />
+        <title>Distributed Map Cache Client Service</title>
+        <link rel="stylesheet" href="../../css/component-usage.css" type="text/css" />
+    </head>
 
-<body>
-	<p>
-		Below is an example of how to create a distributed map cache server for clients to connect to.
-		Note that the identifier in this example is <code>cache-server</code>. If you are using this template
-		to create your own DistributedMapCache server, replace the values in this template with values that are
-		suitable for your system. Possible options for <code>Port</code>, <code>Maximum Cache Entries</code>,
-		<code>Eviction Strategy</code>, <span style="font-style: italic;">SSL Context Service</span>, and
-		<span style="font-style: italic;">Persistence Directory</span>
-	</p>
+    <body>
+        <p>
+            Below is an example of how to create a distributed map cache server for clients to connect to.
+            Note that the identifier in this example is <code>cache-server</code>. If you are using this template
+            to create your own DistributedMapCache server, replace the values in this template with values that are
+            suitable for your system. Possible options for <code>Port</code>, <code>Maximum Cache Entries</code>,
+            <code>Eviction Strategy</code>, <span style="font-style: italic;">SSL Context Service</span>, and
+            <span style="font-style: italic;">Persistence Directory</span> may be configured as needed.
+        </p>
 
-	<pre>
+        <pre>
 &lt;?xml version="1.0" encoding="UTF-8" ?&gt;
 &lt;services&gt;
     &lt;service&gt;
@@ -41,6 +41,6 @@
         &lt;property name="Eviction Strategy"&gt;Least Recently Used&lt;/property&gt;
     &lt;/service&gt;
 &lt;/services&gt;
-	</pre>
-</body>
+        </pre>
+    </body>
 </html>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/TestServerAndClient.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/TestServerAndClient.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/TestServerAndClient.java
index dfad5a2..42698b8 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/TestServerAndClient.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/TestServerAndClient.java
@@ -152,7 +152,7 @@ public class TestServerAndClient {
         newServer.shutdownServer();
     }
 
-    @Ignore("Test fails when in a maven parallel build due to address/port already taken - need to vary these so tests can run in parallel")    
+    @Ignore("Test fails when in a maven parallel build due to address/port already taken - need to vary these so tests can run in parallel")
     @Test
     public void testPersistentSetServerAndClientWithLFUEvictions() throws InitializationException, IOException {
         LOGGER.info("Testing " + Thread.currentThread().getStackTrace()[1].getMethodName());
@@ -215,7 +215,7 @@ public class TestServerAndClient {
         newServer.shutdownServer();
     }
 
-    @Ignore("Test fails when in a maven parallel build due to address/port already taken - need to vary these so tests can run in parallel")    
+    @Ignore("Test fails when in a maven parallel build due to address/port already taken - need to vary these so tests can run in parallel")
     @Test
     public void testPersistentSetServerAndClientWithFIFOEvictions() throws InitializationException, IOException {
         LOGGER.info("Testing " + Thread.currentThread().getStackTrace()[1].getMethodName());
@@ -374,8 +374,7 @@ public class TestServerAndClient {
     public void testClientTermination() throws InitializationException, IOException, InterruptedException {
 
         /**
-         * This bypasses the test for build environments in OS X running Java 1.8 due to a JVM bug
-         * See:  https://issues.apache.org/jira/browse/NIFI-437
+         * This bypasses the test for build environments in OS X running Java 1.8 due to a JVM bug. See: https://issues.apache.org/jira/browse/NIFI-437
          */
         Assume.assumeFalse("testClientTermination is skipped due to build environment being OS X with JDK 1.8. See https://issues.apache.org/jira/browse/NIFI-437",
                 SystemUtils.IS_OS_MAC && SystemUtils.IS_JAVA_1_8);
@@ -509,6 +508,7 @@ public class TestServerAndClient {
     }
 
     private static class StringSerializer implements Serializer<String> {
+
         @Override
         public void serialize(final String value, final OutputStream output) throws SerializationException, IOException {
             output.write(value.getBytes(StandardCharsets.UTF_8));
@@ -516,6 +516,7 @@ public class TestServerAndClient {
     }
 
     private static class StringDeserializer implements Deserializer<String> {
+
         @Override
         public String deserialize(final byte[] input) throws DeserializationException, IOException {
             return (input.length == 0) ? null : new String(input, StandardCharsets.UTF_8);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/pom.xml
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/pom.xml b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/pom.xml
index bb3f366..2f87d46 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/pom.xml
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/pom.xml
@@ -14,24 +14,24 @@
   limitations under the License.
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.apache.nifi</groupId>
-    <artifactId>nifi-standard-services</artifactId>
-    <version>0.1.0-incubating-SNAPSHOT</version>
-  </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.nifi</groupId>
+        <artifactId>nifi-standard-services</artifactId>
+        <version>0.1.0-incubating-SNAPSHOT</version>
+    </parent>
   
-  <artifactId>nifi-http-context-map-api</artifactId>
+    <artifactId>nifi-http-context-map-api</artifactId>
   
-  <dependencies>
-  	<dependency>
-  		<groupId>org.apache.nifi</groupId>
-  		<artifactId>nifi-api</artifactId>
-  	</dependency>
-  	<dependency>
-  		<groupId>javax.servlet</groupId>
-  		<artifactId>javax.servlet-api</artifactId>
-  	</dependency>
-  </dependencies>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>javax.servlet</groupId>
+            <artifactId>javax.servlet-api</artifactId>
+        </dependency>
+    </dependencies>
   
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/src/main/java/org/apache/nifi/http/HttpContextMap.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/src/main/java/org/apache/nifi/http/HttpContextMap.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/src/main/java/org/apache/nifi/http/HttpContextMap.java
index 04ff6ce..0dcff03 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/src/main/java/org/apache/nifi/http/HttpContextMap.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-api/src/main/java/org/apache/nifi/http/HttpContextMap.java
@@ -22,51 +22,48 @@ import javax.servlet.http.HttpServletResponse;
 
 import org.apache.nifi.controller.ControllerService;
 
-
 /**
  * <p>
- * An interface that provides the capability of receiving an HTTP servlet request in one component
- * and responding to that request in another component.
+ * An interface that provides the capability of receiving an HTTP servlet request in one component and responding to that request in another component.
  * </p>
- * 
+ *
  * <p>
- * The intended flow is for the component receiving the HTTP request to register the request, response,
- * and AsyncContext with a particular identifier via the 
- * {@link #register(String, HttpServletRequest, HttpServletResponse, AsyncContext)}
- * method. Another component is then able to obtain the response
- * by providing that identifier to the {@link #getResponse(String)} method. After writing to the 
- * HttpServletResponse, the transaction is to then be completed via the {@link #complete(String)} method.
+ * The intended flow is for the component receiving the HTTP request to register the request, response, and AsyncContext with a particular identifier via the
+ * {@link #register(String, HttpServletRequest, HttpServletResponse, AsyncContext)} method. Another component is then able to obtain the response by providing that identifier to the
+ * {@link #getResponse(String)} method. After writing to the HttpServletResponse, the transaction is to then be completed via the {@link #complete(String)} method.
  * </p>
  */
 public interface HttpContextMap extends ControllerService {
 
     /**
      * Registers an HttpServletRequest, HttpServletResponse, and the AsyncContext for a given identifier
-     * 
-     * @param identifier
-     * @param request
-     * @param response
-     * @param context
-	 *
-	 * @return true if register is successful, false if the context map is too full because too many requests have already been received and not processed
-     * 
+     *
+     * @param identifier identifier
+     * @param request request
+     * @param response response
+     * @param context context
+     *
+     * @return true if register is successful, false if the context map is too full because too many requests have already been received and not processed
+     *
      * @throws IllegalStateException if the identifier is already registered
      */
     boolean register(String identifier, HttpServletRequest request, HttpServletResponse response, AsyncContext context);
-    
+
     /**
      * Retrieves the HttpServletResponse for the given identifier, if it exists
-     * @param identifier
+     *
+     * @param identifier identifier
      * @return the HttpServletResponse for the given identifier, or {@code null} if it does not exist
      */
     HttpServletResponse getResponse(String identifier);
-    
+
     /**
      * Marks the HTTP request/response for the given identifier as complete
-     * @param identifier
-     * 
+     *
+     * @param identifier identifier
+     *
      * @throws IllegalStateException if the identifier is not registered to a valid AsyncContext
      */
     void complete(String identifier);
-    
+
 }
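The javadoc above describes a register/lookup/complete handshake: one component registers the servlet request, response, and AsyncContext under an identifier, and another component later fetches the response by that identifier, writes to it, and completes the exchange. Below is a minimal sketch of that flow against the interface as shown in this diff; the helper class, method names, and the txId variable are illustrative only and not part of this commit.

    import java.io.IOException;

    import javax.servlet.AsyncContext;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.apache.nifi.http.HttpContextMap;

    public class HttpContextMapFlowSketch {

        // Receiving side: park the request/response pair under an identifier.
        public static boolean receive(final HttpContextMap contextMap, final String txId,
                final HttpServletRequest request, final HttpServletResponse response) {
            final AsyncContext async = request.startAsync(request, response);
            // register() returns false when the map is already full of outstanding requests.
            return contextMap.register(txId, request, response, async);
        }

        // Responding side: look up the response, write to it, then mark the exchange complete.
        public static void respond(final HttpContextMap contextMap, final String txId, final byte[] body) throws IOException {
            final HttpServletResponse response = contextMap.getResponse(txId);
            if (response == null) {
                return; // unknown or already-completed identifier
            }
            response.setStatus(HttpServletResponse.SC_OK);
            response.getOutputStream().write(body);
            contextMap.complete(txId); // releases the underlying AsyncContext
        }
    }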

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/pom.xml
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/pom.xml b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/pom.xml
index cf4b3cb..eb2abdd 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/pom.xml
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/pom.xml
@@ -24,21 +24,21 @@
     <packaging>jar</packaging>
 	
     <dependencies>
-    	<dependency>
-    		<groupId>org.apache.nifi</groupId>
-    		<artifactId>nifi-api</artifactId>
-    	</dependency>
-    	<dependency>
-    		<groupId>org.apache.nifi</groupId>
-    		<artifactId>nifi-processor-utils</artifactId>
-    	</dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-processor-utils</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-http-context-map-api</artifactId>
         </dependency>
         <dependency>
-        	<groupId>javax.servlet</groupId>
-        	<artifactId>javax.servlet-api</artifactId>
+            <groupId>javax.servlet</groupId>
+            <artifactId>javax.servlet-api</artifactId>
         </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java
index 5e31270..bd3e866 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java
@@ -42,34 +42,35 @@ import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.processor.util.StandardValidators;
 
 @Tags({"http", "request", "response"})
-@SeeAlso(classNames={
-        "org.apache.nifi.processors.standard.HandleHttpRequest", 
-        "org.apache.nifi.processors.standard.HandleHttpResponse"})
+@SeeAlso(classNames = {
+    "org.apache.nifi.processors.standard.HandleHttpRequest",
+    "org.apache.nifi.processors.standard.HandleHttpResponse"})
 @CapabilityDescription("Provides the ability to store and retrieve HTTP requests and responses external to a Processor, so that "
         + "multiple Processors can interact with the same HTTP request.")
 public class StandardHttpContextMap extends AbstractControllerService implements HttpContextMap {
+
     public static final PropertyDescriptor MAX_OUTSTANDING_REQUESTS = new PropertyDescriptor.Builder()
-        .name("Maximum Outstanding Requests")
-        .description("The maximum number of HTTP requests that can be outstanding at any one time. Any attempt to register an additional HTTP Request will cause an error")
-        .required(true)
-        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
-        .defaultValue("5000")
-        .build();
+            .name("Maximum Outstanding Requests")
+            .description("The maximum number of HTTP requests that can be outstanding at any one time. Any attempt to register an additional HTTP Request will cause an error")
+            .required(true)
+            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
+            .defaultValue("5000")
+            .build();
     public static final PropertyDescriptor REQUEST_EXPIRATION = new PropertyDescriptor.Builder()
-        .name("Request Expiration")
-        .description("Specifies how long an HTTP Request should be left unanswered before being evicted from the cache and being responded to with a Service Unavailable status code")
-        .required(true)
-        .expressionLanguageSupported(false)
-        .defaultValue("1 min")
-        .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
-        .build();
-    
+            .name("Request Expiration")
+            .description("Specifies how long an HTTP Request should be left unanswered before being evicted from the cache and being responded to with a Service Unavailable status code")
+            .required(true)
+            .expressionLanguageSupported(false)
+            .defaultValue("1 min")
+            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+            .build();
+
     private final ConcurrentMap<String, Wrapper> wrapperMap = new ConcurrentHashMap<>();
-    
+
     private volatile int maxSize = 5000;
     private volatile long maxRequestNanos;
     private volatile ScheduledExecutorService executor;
-    
+
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
         final List<PropertyDescriptor> properties = new ArrayList<>(2);
@@ -77,67 +78,68 @@ public class StandardHttpContextMap extends AbstractControllerService implements
         properties.add(REQUEST_EXPIRATION);
         return properties;
     }
-    
+
     @OnEnabled
     public void onConfigured(final ConfigurationContext context) {
         maxSize = context.getProperty(MAX_OUTSTANDING_REQUESTS).asInteger();
         executor = Executors.newSingleThreadScheduledExecutor();
-        
+
         maxRequestNanos = context.getProperty(REQUEST_EXPIRATION).asTimePeriod(TimeUnit.NANOSECONDS);
         final long scheduleNanos = maxRequestNanos / 2;
         executor.scheduleWithFixedDelay(new CleanupExpiredRequests(), scheduleNanos, scheduleNanos, TimeUnit.NANOSECONDS);
     }
-    
+
     @OnDisabled
     public void cleanup() {
-        if ( executor != null ) {
+        if (executor != null) {
             executor.shutdown();
         }
     }
-    
+
     @Override
     public boolean register(final String identifier, final HttpServletRequest request, final HttpServletResponse response, final AsyncContext context) {
         // fail if there are too many already. Maybe add a configuration property for how many
         // outstanding, with a default of say 5000
-        if ( wrapperMap.size() >= maxSize ) {
-			return false;
+        if (wrapperMap.size() >= maxSize) {
+            return false;
         }
         final Wrapper wrapper = new Wrapper(request, response, context);
         final Wrapper existing = wrapperMap.putIfAbsent(identifier, wrapper);
-        if ( existing != null ) {
+        if (existing != null) {
             throw new IllegalStateException("HTTP Request already registered with identifier " + identifier);
         }
-		
-		return true;
+
+        return true;
     }
 
     @Override
     public HttpServletResponse getResponse(final String identifier) {
         final Wrapper wrapper = wrapperMap.get(identifier);
-        if ( wrapper == null ) {
+        if (wrapper == null) {
             return null;
         }
-        
+
         return wrapper.getResponse();
     }
 
     @Override
     public void complete(final String identifier) {
         final Wrapper wrapper = wrapperMap.remove(identifier);
-        if ( wrapper == null ) {
+        if (wrapper == null) {
             throw new IllegalStateException("No HTTP Request registered with identifier " + identifier);
         }
-        
+
         wrapper.getAsync().complete();
     }
 
     private static class Wrapper {
+
         @SuppressWarnings("unused")
         private final HttpServletRequest request;
         private final HttpServletResponse response;
         private final AsyncContext async;
         private final long nanoTimeAdded = System.nanoTime();
-        
+
         public Wrapper(final HttpServletRequest request, final HttpServletResponse response, final AsyncContext async) {
             this.request = request;
             this.response = response;
@@ -151,24 +153,25 @@ public class StandardHttpContextMap extends AbstractControllerService implements
         public AsyncContext getAsync() {
             return async;
         }
-        
+
         public long getNanoTimeAdded() {
             return nanoTimeAdded;
         }
     }
-    
+
     private class CleanupExpiredRequests implements Runnable {
+
         @Override
         public void run() {
             final long now = System.nanoTime();
             final long threshold = now - maxRequestNanos;
-            
+
             final Iterator<Map.Entry<String, Wrapper>> itr = wrapperMap.entrySet().iterator();
-            while ( itr.hasNext() ) {
+            while (itr.hasNext()) {
                 final Map.Entry<String, Wrapper> entry = itr.next();
-                if ( entry.getValue().getNanoTimeAdded() < threshold ) {
+                if (entry.getValue().getNanoTimeAdded() < threshold) {
                     itr.remove();
-                
+
                     // send SERVICE_UNAVAILABLE
                     try {
                         final AsyncContext async = entry.getValue().getAsync();
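The CleanupExpiredRequests runnable above shows the eviction pattern used by this service: entries are timestamped with System.nanoTime() when registered, and a single-threaded ScheduledExecutorService periodically removes any entry older than the configured expiration before answering it with SERVICE_UNAVAILABLE. A self-contained sketch of that sweep pattern using only JDK types follows; the class and field names are illustrative, not NiFi API.

    import java.util.Iterator;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    public class ExpirationSweepSketch {

        private static final class Entry {
            private final long nanoTimeAdded = System.nanoTime();
            private final String payload;

            private Entry(final String payload) {
                this.payload = payload;
            }
        }

        private final ConcurrentMap<String, Entry> map = new ConcurrentHashMap<>();
        private final long maxAgeNanos = TimeUnit.MINUTES.toNanos(1);
        private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();

        public void start() {
            // Sweep at half the expiration period, mirroring onConfigured() above.
            final long scheduleNanos = maxAgeNanos / 2;
            executor.scheduleWithFixedDelay(new Runnable() {
                @Override
                public void run() {
                    sweep();
                }
            }, scheduleNanos, scheduleNanos, TimeUnit.NANOSECONDS);
        }

        private void sweep() {
            final long threshold = System.nanoTime() - maxAgeNanos;
            final Iterator<Map.Entry<String, Entry>> itr = map.entrySet().iterator();
            while (itr.hasNext()) {
                if (itr.next().getValue().nanoTimeAdded < threshold) {
                    itr.remove(); // expired; the real service also responds with SERVICE_UNAVAILABLE here
                }
            }
        }
    }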

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/resources/docs/org.apache.nifi.http.StandardHttpContextMap/index.html
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/resources/docs/org.apache.nifi.http.StandardHttpContextMap/index.html b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/resources/docs/org.apache.nifi.http.StandardHttpContextMap/index.html
index 5c8b83a..774c3d9 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/resources/docs/org.apache.nifi.http.StandardHttpContextMap/index.html
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/resources/docs/org.apache.nifi.http.StandardHttpContextMap/index.html
@@ -22,15 +22,15 @@
     </head>
 
     <body>
-		<h2>Description:</h2>
-		<p>
-			This is the standard implementation of the SSL Context Map. This service is used to provide
-			coordination between 
-			<a href="../org.apache.nifi.processors.standard.HandleHttpRequest/index.html">HandleHttpRequest</a>
-			and
-			<a href="../org.apache.nifi.processors.standard.HandleHttpResponse/index.html">HandleHttpResponse</a>
-			Processors.
-		</p>
+        <h2>Description:</h2>
+        <p>
+            This is the standard implementation of the HTTP Context Map. This service is used to provide
+            coordination between 
+            <a href="../org.apache.nifi.processors.standard.HandleHttpRequest/index.html">HandleHttpRequest</a>
+            and
+            <a href="../org.apache.nifi.processors.standard.HandleHttpResponse/index.html">HandleHttpResponse</a>
+            Processors.
+        </p>
 
         <!-- Service Documentation ================================================== -->
         <h2>Configuring the HTTP Context Map:</h2>
@@ -40,9 +40,9 @@
         </p>
 
         <p>
-        	This controller service exposes a single property named <code>Maximum Outstanding Requests</code>.
-        	This property determines the maximum number of HTTP requests that can be outstanding at any one time. 
-        	Any attempt to register an additional HTTP Request will cause an error. The default value is 5000.
+            This controller service exposes a single property named <code>Maximum Outstanding Requests</code>.
+            This property determines the maximum number of HTTP requests that can be outstanding at any one time. 
+            Any attempt to register an additional HTTP Request will cause an error. The default value is 5000.
             Below is an example of the template for a StandardHttpContextMap controller service.
         </p>
 
@@ -56,12 +56,12 @@
     &lt;/service&gt;
 &lt;/services&gt;
         </pre>
-        
+
         <p>
-		<strong>See Also:</strong><br />
-		<a href="../org.apache.nifi.processors.standard.HandleHttpRequest/index.html">HandleHttpRequest</a><br />
-		<a href="../org.apache.nifi.processors.standard.HandleHttpResponse/index.html">HandleHttpResponse</a><br />
-		</p>
-        
+            <strong>See Also:</strong><br />
+            <a href="../org.apache.nifi.processors.standard.HandleHttpRequest/index.html">HandleHttpRequest</a><br />
+            <a href="../org.apache.nifi.processors.standard.HandleHttpResponse/index.html">HandleHttpResponse</a><br />
+        </p>
+
     </body>
 </html>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/main/java/org/apache/nifi/ssl/StandardSSLContextService.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/main/java/org/apache/nifi/ssl/StandardSSLContextService.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/main/java/org/apache/nifi/ssl/StandardSSLContextService.java
index 34f1844..cde71da 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/main/java/org/apache/nifi/ssl/StandardSSLContextService.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/main/java/org/apache/nifi/ssl/StandardSSLContextService.java
@@ -205,7 +205,7 @@ public class StandardSSLContextService extends AbstractControllerService impleme
         }
         return results;
     }
-    
+
     private void verifySslConfig(final ValidationContext validationContext) throws ProcessException {
         try {
             final String keystoreFile = validationContext.getProperty(KEYSTORE).getValue();
@@ -237,7 +237,6 @@ public class StandardSSLContextService extends AbstractControllerService impleme
             throw new ProcessException(e);
         }
     }
-    
 
     @Override
     public SSLContext createSSLContext(final ClientAuth clientAuth) throws ProcessException {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/test/java/org/apache/nifi/ssl/SSLContextServiceTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/test/java/org/apache/nifi/ssl/SSLContextServiceTest.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/test/java/org/apache/nifi/ssl/SSLContextServiceTest.java
index 7d191fb..1e22dee 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/test/java/org/apache/nifi/ssl/SSLContextServiceTest.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-ssl-context-bundle/nifi-ssl-context-service/src/test/java/org/apache/nifi/ssl/SSLContextServiceTest.java
@@ -73,7 +73,7 @@ public class SSLContextServiceTest {
         properties.put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "wrongpassword");
         properties.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
         runner.addControllerService("test-bad4", service, properties);
-        
+
         runner.assertNotValid(service);
     }
 
@@ -126,7 +126,7 @@ public class SSLContextServiceTest {
             properties.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
             runner.addControllerService("test-good2", service, properties);
             runner.enableControllerService(service);
-            
+
             runner.setProperty("SSL Context Svc ID", "test-good2");
             runner.assertValid();
             Assert.assertNotNull(service);


[07/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsProperties.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsProperties.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsProperties.java
index 8332082..ed73569 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsProperties.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsProperties.java
@@ -37,144 +37,144 @@ public class JmsProperties {
     public static final String MSG_TYPE_EMPTY = "empty";
 
     // Standard JMS Properties
-    public static final PropertyDescriptor JMS_PROVIDER = new PropertyDescriptor.Builder().
-            name("JMS Provider").
-            description("The Provider used for the JMS Server").
-            required(true).
-            allowableValues(ACTIVEMQ_PROVIDER).
-            defaultValue(ACTIVEMQ_PROVIDER).
-            build();
-    public static final PropertyDescriptor URL = new PropertyDescriptor.Builder().
-            name("URL").
-            description("The URL of the JMS Server").
-            addValidator(StandardValidators.URI_VALIDATOR).
-            required(true).
-            build();
-    public static final PropertyDescriptor TIMEOUT = new PropertyDescriptor.Builder().
-            name("Communications Timeout").
-            description("The amount of time to wait when attempting to receive a message before giving up and assuming failure").
-            required(true).
-            addValidator(StandardValidators.TIME_PERIOD_VALIDATOR).
-            defaultValue("30 sec").
-            build();
-    public static final PropertyDescriptor USERNAME = new PropertyDescriptor.Builder().
-            name("Username").
-            description("Username used for authentication and authorization").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
-    public static final PropertyDescriptor PASSWORD = new PropertyDescriptor.Builder().
-            name("Password").
-            description("Password used for authentication and authorization").
-            required(false).
-            addValidator(Validator.VALID).
-            sensitive(true).
-            build();
-    public static final PropertyDescriptor CLIENT_ID_PREFIX = new PropertyDescriptor.Builder().
-            name("Client ID Prefix").
-            description("A human-readable ID that can be used to associate connections with yourself so that the maintainers of the JMS Server know who to contact if problems arise").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
+    public static final PropertyDescriptor JMS_PROVIDER = new PropertyDescriptor.Builder()
+            .name("JMS Provider")
+            .description("The Provider used for the JMS Server")
+            .required(true)
+            .allowableValues(ACTIVEMQ_PROVIDER)
+            .defaultValue(ACTIVEMQ_PROVIDER)
+            .build();
+    public static final PropertyDescriptor URL = new PropertyDescriptor.Builder()
+            .name("URL")
+            .description("The URL of the JMS Server")
+            .addValidator(StandardValidators.URI_VALIDATOR)
+            .required(true)
+            .build();
+    public static final PropertyDescriptor TIMEOUT = new PropertyDescriptor.Builder()
+            .name("Communications Timeout")
+            .description("The amount of time to wait when attempting to receive a message before giving up and assuming failure")
+            .required(true)
+            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+            .defaultValue("30 sec")
+            .build();
+    public static final PropertyDescriptor USERNAME = new PropertyDescriptor.Builder()
+            .name("Username")
+            .description("Username used for authentication and authorization")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor PASSWORD = new PropertyDescriptor.Builder()
+            .name("Password")
+            .description("Password used for authentication and authorization")
+            .required(false)
+            .addValidator(Validator.VALID)
+            .sensitive(true)
+            .build();
+    public static final PropertyDescriptor CLIENT_ID_PREFIX = new PropertyDescriptor.Builder()
+            .name("Client ID Prefix")
+            .description("A human-readable ID that can be used to associate connections with yourself so that the maintainers of the JMS Server know who to contact if problems arise")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
 
     // Topic/Queue determination Properties
-    public static final PropertyDescriptor DESTINATION_NAME = new PropertyDescriptor.Builder().
-            name("Destination Name").
-            description("The name of the JMS Topic or queue to use").
-            required(true).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
-    public static final PropertyDescriptor DESTINATION_TYPE = new PropertyDescriptor.Builder().
-            name("Destination Type").
-            description("The type of the JMS Destination to use").
-            required(true).
-            allowableValues(DESTINATION_TYPE_QUEUE, DESTINATION_TYPE_TOPIC).
-            defaultValue(DESTINATION_TYPE_QUEUE).
-            build();
+    public static final PropertyDescriptor DESTINATION_NAME = new PropertyDescriptor.Builder()
+            .name("Destination Name")
+            .description("The name of the JMS Topic or queue to use")
+            .required(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor DESTINATION_TYPE = new PropertyDescriptor.Builder()
+            .name("Destination Type")
+            .description("The type of the JMS Destination to use")
+            .required(true)
+            .allowableValues(DESTINATION_TYPE_QUEUE, DESTINATION_TYPE_TOPIC)
+            .defaultValue(DESTINATION_TYPE_QUEUE)
+            .build();
 
-    public static final PropertyDescriptor DURABLE_SUBSCRIPTION = new PropertyDescriptor.Builder().
-            name("Use Durable Subscription").
-            description("If true, connections to the specified topic will use Durable Subscription so that messages are queued when we are not pulling them").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("false").
-            build();
+    public static final PropertyDescriptor DURABLE_SUBSCRIPTION = new PropertyDescriptor.Builder()
+            .name("Use Durable Subscription")
+            .description("If true, connections to the specified topic will use Durable Subscription so that messages are queued when we are not pulling them")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("false")
+            .build();
 
     // JMS Publisher Properties
-    public static final PropertyDescriptor ATTRIBUTES_TO_JMS_PROPS = new PropertyDescriptor.Builder().
-            name("Copy Attributes to JMS Properties").
-            description("Whether or not FlowFile Attributes should be translated into JMS Message Properties. If true, all "
+    public static final PropertyDescriptor ATTRIBUTES_TO_JMS_PROPS = new PropertyDescriptor.Builder()
+            .name("Copy Attributes to JMS Properties")
+            .description("Whether or not FlowFile Attributes should be translated into JMS Message Properties. If true, all "
                     + "attributes starting with 'jms.' will be set as Properties on the JMS Message (without the 'jms.' prefix). "
                     + "If an attribute exists that starts with the same value but ends in '.type', that attribute will be used "
-                    + "to determine the JMS Message Property type.").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("true").
-            build();
+                    + "to determine the JMS Message Property type.")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .build();
 
     // JMS Listener Properties
-    public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder().
-            name("Message Batch Size").
-            description("The number of messages to pull/push in a single iteration of the processor").
-            required(true).
-            addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR).
-            defaultValue("10").
-            build();
-    public static final PropertyDescriptor ACKNOWLEDGEMENT_MODE = new PropertyDescriptor.Builder().
-            name("Acknowledgement Mode").
-            description("The JMS Acknowledgement Mode. Using Auto Acknowledge can cause messages to be lost on restart of NiFi but may provide better performance than Client Acknowledge.").
-            required(true).
-            allowableValues(ACK_MODE_CLIENT, ACK_MODE_AUTO).
-            defaultValue(ACK_MODE_CLIENT).
-            build();
-    public static final PropertyDescriptor JMS_PROPS_TO_ATTRIBUTES = new PropertyDescriptor.Builder().
-            name("Copy JMS Properties to Attributes").
-            description("Whether or not the JMS Message Properties should be copied to the FlowFile Attributes; if so, the attribute name will be jms.XXX, where XXX is the JMS Property name").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("true").
-            build();
-    public static final PropertyDescriptor MESSAGE_SELECTOR = new PropertyDescriptor.Builder().
-            name("Message Selector").
-            description("The JMS Message Selector to use in order to narrow the messages that are pulled").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
+    public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder()
+            .name("Message Batch Size")
+            .description("The number of messages to pull/push in a single iteration of the processor")
+            .required(true)
+            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
+            .defaultValue("10")
+            .build();
+    public static final PropertyDescriptor ACKNOWLEDGEMENT_MODE = new PropertyDescriptor.Builder()
+            .name("Acknowledgement Mode")
+            .description("The JMS Acknowledgement Mode. Using Auto Acknowledge can cause messages to be lost on restart of NiFi but may provide better performance than Client Acknowledge.")
+            .required(true)
+            .allowableValues(ACK_MODE_CLIENT, ACK_MODE_AUTO)
+            .defaultValue(ACK_MODE_CLIENT)
+            .build();
+    public static final PropertyDescriptor JMS_PROPS_TO_ATTRIBUTES = new PropertyDescriptor.Builder()
+            .name("Copy JMS Properties to Attributes")
+            .description("Whether or not the JMS Message Properties should be copied to the FlowFile Attributes; if so, the attribute name will be jms.XXX, where XXX is the JMS Property name")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .build();
+    public static final PropertyDescriptor MESSAGE_SELECTOR = new PropertyDescriptor.Builder()
+            .name("Message Selector")
+            .description("The JMS Message Selector to use in order to narrow the messages that are pulled")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
 
     // JMS Producer Properties
-    public static final PropertyDescriptor MESSAGE_TYPE = new PropertyDescriptor.Builder().
-            name("Message Type").
-            description("The Type of JMS Message to Construct").
-            required(true).
-            allowableValues(MSG_TYPE_BYTE, MSG_TYPE_STREAM, MSG_TYPE_TEXT, MSG_TYPE_MAP, MSG_TYPE_EMPTY).
-            defaultValue(MSG_TYPE_BYTE).
-            build();
-    public static final PropertyDescriptor MESSAGE_PRIORITY = new PropertyDescriptor.Builder().
-            name("Message Priority").
-            description("The Priority of the Message").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor REPLY_TO_QUEUE = new PropertyDescriptor.Builder().
-            name("Reply-To Queue").
-            description("The name of the queue to which a reply to should be added").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor MESSAGE_TTL = new PropertyDescriptor.Builder().
-            name("Message Time to Live").
-            description("The amount of time that the message should live on the destination before being removed; if not specified, the message will never expire.").
-            required(false).
-            addValidator(StandardValidators.TIME_PERIOD_VALIDATOR).
-            build();
-    public static final PropertyDescriptor MAX_BUFFER_SIZE = new PropertyDescriptor.Builder().
-            name("Max Buffer Size").
-            description("The maximum amount of data that can be buffered for a JMS Message. If a FlowFile's size exceeds this value, the FlowFile will be routed to failure.").
-            required(true).
-            addValidator(StandardValidators.DATA_SIZE_VALIDATOR).
-            defaultValue("1 MB").
-            build();
+    public static final PropertyDescriptor MESSAGE_TYPE = new PropertyDescriptor.Builder()
+            .name("Message Type")
+            .description("The Type of JMS Message to Construct")
+            .required(true)
+            .allowableValues(MSG_TYPE_BYTE, MSG_TYPE_STREAM, MSG_TYPE_TEXT, MSG_TYPE_MAP, MSG_TYPE_EMPTY)
+            .defaultValue(MSG_TYPE_BYTE)
+            .build();
+    public static final PropertyDescriptor MESSAGE_PRIORITY = new PropertyDescriptor.Builder()
+            .name("Message Priority")
+            .description("The Priority of the Message")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor REPLY_TO_QUEUE = new PropertyDescriptor.Builder()
+            .name("Reply-To Queue")
+            .description("The name of the queue to which a reply to should be added")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor MESSAGE_TTL = new PropertyDescriptor.Builder()
+            .name("Message Time to Live")
+            .description("The amount of time that the message should live on the destination before being removed; if not specified, the message will never expire.")
+            .required(false)
+            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor MAX_BUFFER_SIZE = new PropertyDescriptor.Builder()
+            .name("Max Buffer Size")
+            .description("The maximum amount of data that can be buffered for a JMS Message. If a FlowFile's size exceeds this value, the FlowFile will be routed to failure.")
+            .required(true)
+            .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
+            .defaultValue("1 MB")
+            .build();
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JsonPathExpressionValidator.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JsonPathExpressionValidator.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JsonPathExpressionValidator.java
index 8a1a056..2a0bd43 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JsonPathExpressionValidator.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JsonPathExpressionValidator.java
@@ -36,10 +36,8 @@ import java.util.regex.Pattern;
 import static java.util.Arrays.asList;
 
 /**
- * JsonPathExpressionValidator performs the same execution as
- * com.jayway.jsonpath.internal.PathCompiler, but does not throw exceptions when
- * an invalid path segment is found. Limited access to create JsonPath objects
- * requires a separate flow of execution in avoiding exceptions.
+ * JsonPathExpressionValidator performs the same execution as com.jayway.jsonpath.internal.PathCompiler, but does not throw exceptions when an invalid path segment is found. Limited access to create
+ * JsonPath objects requires a separate flow of execution to avoid exceptions.
  *
  * @see
  * <a href="https://github.com/jayway/JsonPath">https://github.com/jayway/JsonPath</a>
@@ -72,8 +70,7 @@ public class JsonPathExpressionValidator {
      * </code>
      *
      * @param path to evaluate for validity
-     * @param filters applied to path expression; this is typically unused in
-     * the context of Processors
+     * @param filters applied to path expression; this is typically unused in the context of Processors
      * @return true if the specified path is valid; false otherwise
      */
     public static boolean isValidExpression(String path, Predicate... filters) {
@@ -138,8 +135,7 @@ public class JsonPathExpressionValidator {
                         } else if (positions == 1 && path.charAt(i) == '*') {
                             fragment = "[*]";
                         } else {
-                            fragment = PROPERTY_OPEN + path.
-                                    substring(i, i + positions) + PROPERTY_CLOSE;
+                            fragment = PROPERTY_OPEN + path.substring(i, i + positions) + PROPERTY_CLOSE;
                         }
                         i += positions;
                     }
@@ -160,8 +156,7 @@ public class JsonPathExpressionValidator {
              * Analyze each component represented by a fragment.  If there is a failure to properly evaluate,
              * a null result is returned
              */
-            PathToken analyzedComponent = PathComponentAnalyzer.
-                    analyze(fragment, filterList);
+            PathToken analyzedComponent = PathComponentAnalyzer.analyze(fragment, filterList);
             if (analyzedComponent == null) {
                 return false;
             }
@@ -219,8 +214,7 @@ public class JsonPathExpressionValidator {
 
     static class PathComponentAnalyzer {
 
-        private static final Pattern FILTER_PATTERN = Pattern.
-                compile("^\\[\\s*\\?\\s*[,\\s*\\?]*?\\s*]$"); //[?] or [?, ?, ...]
+        private static final Pattern FILTER_PATTERN = Pattern.compile("^\\[\\s*\\?\\s*[,\\s*\\?]*?\\s*]$"); //[?] or [?, ?, ...]
         private int i;
         private char current;
 
@@ -248,8 +242,7 @@ public class JsonPathExpressionValidator {
                 return new WildcardPathToken();
             } else if ("[?]".equals(pathFragment)) {
                 return new PredicatePathToken(filterList.poll());
-            } else if (FILTER_PATTERN.matcher(pathFragment).
-                    matches()) {
+            } else if (FILTER_PATTERN.matcher(pathFragment).matches()) {
                 final int criteriaCount = Utils.countMatches(pathFragment, "?");
                 List<Predicate> filters = new ArrayList<>(criteriaCount);
                 for (int i = 0; i < criteriaCount; i++) {
@@ -288,8 +281,7 @@ public class JsonPathExpressionValidator {
             }
             i = bounds[1];
 
-            return new PredicatePathToken(Filter.parse(pathFragment.
-                    substring(bounds[0], bounds[1])));
+            return new PredicatePathToken(Filter.parse(pathFragment.substring(bounds[0], bounds[1])));
         }
 
         int[] findFilterBounds() {
@@ -461,8 +453,7 @@ public class JsonPathExpressionValidator {
                                     sliceFrom = true;
                                 } else {
                                     sliceBetween = true;
-                                    numbers.add(Integer.parseInt(buffer.
-                                            toString()));
+                                    numbers.add(Integer.parseInt(buffer.toString()));
                                     buffer.setLength(0);
                                 }
                             }
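Because the validator above is written to return false rather than throw when it hits an invalid path segment, callers can probe user-supplied JsonPath strings without wrapping the check in a try/catch. A tiny usage sketch of the public isValidExpression() method shown in this diff; the sample paths are illustrative and the expected results are assumptions based on the javadoc.

    import org.apache.nifi.processors.standard.util.JsonPathExpressionValidator;

    public class JsonPathCheckSketch {
        public static void main(final String[] args) {
            // A well-formed path is expected to validate.
            System.out.println(JsonPathExpressionValidator.isValidExpression("$.store.book[*].author"));
            // A malformed path (unterminated bracket) is expected to return false instead of throwing.
            System.out.println(JsonPathExpressionValidator.isValidExpression("$.store.book["));
        }
    }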

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/NLKBufferedReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/NLKBufferedReader.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/NLKBufferedReader.java
index 20726b2..c524761 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/NLKBufferedReader.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/NLKBufferedReader.java
@@ -35,8 +35,7 @@ public class NLKBufferedReader extends BufferedReader {
     private static int defaultExpectedLineLength = 80;
 
     /**
-     * Creates a buffering character-input stream that uses an input buffer of
-     * the specified size.
+     * Creates a buffering character-input stream that uses an input buffer of the specified size.
      *
      * @param in A Reader
      * @param sz Input-buffer size
@@ -51,8 +50,7 @@ public class NLKBufferedReader extends BufferedReader {
     }
 
     /**
-     * Creates a buffering character-input stream that uses a default-sized
-     * input buffer.
+     * Creates a buffering character-input stream that uses a default-sized input buffer.
      *
      * @param in A Reader
      */
@@ -61,13 +59,9 @@ public class NLKBufferedReader extends BufferedReader {
     }
 
     /**
-     * Reads a line of text. A line is considered to be terminated by any one of
-     * a line feed ('\n'), a carriage return ('\r'), or a carriage return
-     * followed immediately by a linefeed.
+     * Reads a line of text. A line is considered to be terminated by any one of a line feed ('\n'), a carriage return ('\r'), or a carriage return followed immediately by a linefeed.
      *
-     * @return A String containing the contents of the line, including any
-     * line-termination characters, or null if the end of the stream has been
-     * reached
+     * @return A String containing the contents of the line, including any line-termination characters, or null if the end of the stream has been reached
      *
      * @exception IOException If an I/O error occurs
      */

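The Javadoc above spells out the NLKBufferedReader.readLine() contract: unlike java.io.BufferedReader, the returned line keeps its terminating '\n', '\r' or '\r\n'. A minimal usage sketch, relying only on the two constructors and the readLine() behaviour documented in the hunks above (the sample input is illustrative):

    import java.io.IOException;
    import java.io.StringReader;

    import org.apache.nifi.processors.standard.util.NLKBufferedReader;

    public class NLKBufferedReaderExample {
        public static void main(final String[] args) throws IOException {
            final String text = "Line #1\r\nLine #2\nLine #3";
            try (final NLKBufferedReader reader = new NLKBufferedReader(new StringReader(text), 4096)) {
                String line;
                while ((line = reader.readLine()) != null) {
                    // The terminator is preserved, so split lines can later be re-joined
                    // without guessing whether the source used \n or \r\n.
                    System.out.println(line.replace("\r", "\\r").replace("\n", "\\n"));
                }
            }
        }
    }
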
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
index c8e7b78..5034b83 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
@@ -50,64 +50,61 @@ import com.jcraft.jsch.SftpException;
 
 public class SFTPTransfer implements FileTransfer {
 
-    public static final PropertyDescriptor PRIVATE_KEY_PATH = new PropertyDescriptor.Builder().
-            name("Private Key Path").
-            description("The fully qualified path to the Private Key file").
-            required(false).
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            build();
-    public static final PropertyDescriptor PRIVATE_KEY_PASSPHRASE = new PropertyDescriptor.Builder().
-            name("Private Key Passphrase").
-            description("Password for the private key").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            sensitive(true).
-            build();
-    public static final PropertyDescriptor HOST_KEY_FILE = new PropertyDescriptor.Builder().
-            name("Host Key File").
-            description("If supplied, the given file will be used as the Host Key; otherwise, no use host key file will be used").
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            required(false).
-            build();
-    public static final PropertyDescriptor STRICT_HOST_KEY_CHECKING = new PropertyDescriptor.Builder().
-            name("Strict Host Key Checking").
-            description("Indicates whether or not strict enforcement of hosts keys should be applied").
-            allowableValues("true", "false").
-            defaultValue("false").
-            required(true).
-            build();
-    public static final PropertyDescriptor PORT = new PropertyDescriptor.Builder().
-            name("Port").
-            description("The port that the remote system is listening on for file transfers").
-            addValidator(StandardValidators.PORT_VALIDATOR).
-            required(true).
-            defaultValue("22").
-            build();
-    public static final PropertyDescriptor USE_KEEPALIVE_ON_TIMEOUT = new PropertyDescriptor.Builder().
-            name("Send Keep Alive On Timeout").
-            description("Indicates whether or not to send a single Keep Alive message when SSH socket times out").
-            allowableValues("true", "false").
-            defaultValue("true").
-            required(true).
-            build();
+    public static final PropertyDescriptor PRIVATE_KEY_PATH = new PropertyDescriptor.Builder()
+            .name("Private Key Path")
+            .description("The fully qualified path to the Private Key file")
+            .required(false)
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor PRIVATE_KEY_PASSPHRASE = new PropertyDescriptor.Builder()
+            .name("Private Key Passphrase")
+            .description("Password for the private key")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .sensitive(true)
+            .build();
+    public static final PropertyDescriptor HOST_KEY_FILE = new PropertyDescriptor.Builder()
+            .name("Host Key File")
+            .description("If supplied, the given file will be used as the Host Key; otherwise, no use host key file will be used")
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .required(false)
+            .build();
+    public static final PropertyDescriptor STRICT_HOST_KEY_CHECKING = new PropertyDescriptor.Builder()
+            .name("Strict Host Key Checking")
+            .description("Indicates whether or not strict enforcement of hosts keys should be applied")
+            .allowableValues("true", "false")
+            .defaultValue("false")
+            .required(true)
+            .build();
+    public static final PropertyDescriptor PORT = new PropertyDescriptor.Builder()
+            .name("Port")
+            .description("The port that the remote system is listening on for file transfers")
+            .addValidator(StandardValidators.PORT_VALIDATOR)
+            .required(true)
+            .defaultValue("22")
+            .build();
+    public static final PropertyDescriptor USE_KEEPALIVE_ON_TIMEOUT = new PropertyDescriptor.Builder()
+            .name("Send Keep Alive On Timeout")
+            .description("Indicates whether or not to send a single Keep Alive message when SSH socket times out")
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .required(true)
+            .build();
 
     /**
-     * Dynamic property which is used to decide if the
-     * {@link #ensureDirectoryExists(FlowFile, File)} method should perform a
-     * {@link ChannelSftp#ls(String)} before calling
-     * {@link ChannelSftp#mkdir(String)}. In most cases, the code should call ls
-     * before mkdir, but some weird permission setups (chmod 100) on a directory
-     * would cause the 'ls' to throw a permission exception.
+     * Dynamic property which is used to decide if the {@link #ensureDirectoryExists(FlowFile, File)} method should perform a {@link ChannelSftp#ls(String)} before calling
+     * {@link ChannelSftp#mkdir(String)}. In most cases, the code should call ls before mkdir, but some weird permission setups (chmod 100) on a directory would cause the 'ls' to throw a permission
+     * exception.
      * <p>
      * This property is dynamic until deemed a worthy inclusion as proper.
      */
-    public static final PropertyDescriptor DISABLE_DIRECTORY_LISTING = new PropertyDescriptor.Builder().
-            name("Disable Directory Listing").
-            description("Disables directory listings before operations which might fail, such as configurations which create directory structures.").
-            addValidator(StandardValidators.BOOLEAN_VALIDATOR).
-            dynamic(true).
-            defaultValue("false").
-            build();
+    public static final PropertyDescriptor DISABLE_DIRECTORY_LISTING = new PropertyDescriptor.Builder()
+            .name("Disable Directory Listing")
+            .description("Disables directory listings before operations which might fail, such as configurations which create directory structures.")
+            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
+            .dynamic(true)
+            .defaultValue("false")
+            .build();
 
     private final ProcessorLog logger;
 
@@ -123,10 +120,8 @@ public class SFTPTransfer implements FileTransfer {
         this.ctx = processContext;
         this.logger = logger;
 
-        final PropertyValue disableListing = processContext.
-                getProperty(DISABLE_DIRECTORY_LISTING);
-        disableDirectoryListing = disableListing == null ? false : Boolean.TRUE.
-                equals(disableListing.asBoolean());
+        final PropertyValue disableListing = processContext.getProperty(DISABLE_DIRECTORY_LISTING);
+        disableDirectoryListing = disableListing == null ? false : Boolean.TRUE.equals(disableListing.asBoolean());
     }
 
     @Override
@@ -136,13 +131,9 @@ public class SFTPTransfer implements FileTransfer {
 
     @Override
     public List<FileInfo> getListing() throws IOException {
-        final String path = ctx.getProperty(FileTransfer.REMOTE_PATH).
-                evaluateAttributeExpressions().
-                getValue();
+        final String path = ctx.getProperty(FileTransfer.REMOTE_PATH).evaluateAttributeExpressions().getValue();
         final int depth = 0;
-        final int maxResults = ctx.
-                getProperty(FileTransfer.REMOTE_POLL_BATCH_SIZE).
-                asInteger();
+        final int maxResults = ctx.getProperty(FileTransfer.REMOTE_POLL_BATCH_SIZE).asInteger();
         final List<FileInfo> listing = new ArrayList<>(1000);
         getListing(path, depth, maxResults, listing);
         return listing;
@@ -154,43 +145,28 @@ public class SFTPTransfer implements FileTransfer {
         }
 
         if (depth >= 100) {
-            logger.
-                    warn(this + " had to stop recursively searching directories at a recursive depth of " + depth + " to avoid memory issues");
+            logger.warn(this + " had to stop recursively searching directories at a recursive depth of " + depth + " to avoid memory issues");
             return;
         }
 
         final boolean ignoreDottedFiles = ctx.
-                getProperty(FileTransfer.IGNORE_DOTTED_FILES).
-                asBoolean();
-        final boolean recurse = ctx.getProperty(FileTransfer.RECURSIVE_SEARCH).
-                asBoolean();
-        final String fileFilterRegex = ctx.
-                getProperty(FileTransfer.FILE_FILTER_REGEX).
-                getValue();
-        final Pattern pattern = (fileFilterRegex == null) ? null : Pattern.
-                compile(fileFilterRegex);
-        final String pathFilterRegex = ctx.
-                getProperty(FileTransfer.PATH_FILTER_REGEX).
-                getValue();
-        final Pattern pathPattern = (!recurse || pathFilterRegex == null) ? null : Pattern.
-                compile(pathFilterRegex);
-        final String remotePath = ctx.getProperty(FileTransfer.REMOTE_PATH).
-                evaluateAttributeExpressions().
-                getValue();
+                getProperty(FileTransfer.IGNORE_DOTTED_FILES).asBoolean();
+        final boolean recurse = ctx.getProperty(FileTransfer.RECURSIVE_SEARCH).asBoolean();
+        final String fileFilterRegex = ctx.getProperty(FileTransfer.FILE_FILTER_REGEX).getValue();
+        final Pattern pattern = (fileFilterRegex == null) ? null : Pattern.compile(fileFilterRegex);
+        final String pathFilterRegex = ctx.getProperty(FileTransfer.PATH_FILTER_REGEX).getValue();
+        final Pattern pathPattern = (!recurse || pathFilterRegex == null) ? null : Pattern.compile(pathFilterRegex);
+        final String remotePath = ctx.getProperty(FileTransfer.REMOTE_PATH).evaluateAttributeExpressions().getValue();
 
         // check if this directory path matches the PATH_FILTER_REGEX
         boolean pathFilterMatches = true;
         if (pathPattern != null) {
             Path reldir = path == null ? Paths.get(".") : Paths.get(path);
             if (remotePath != null) {
-                reldir = Paths.get(remotePath).
-                        relativize(reldir);
-            }
-            if (reldir != null && !reldir.toString().
-                    isEmpty()) {
-                if (!pathPattern.matcher(reldir.toString().
-                        replace("\\", "/")).
-                        matches()) {
+                reldir = Paths.get(remotePath).relativize(reldir);
+            }
+            if (reldir != null && !reldir.toString().isEmpty()) {
+                if (!pathPattern.matcher(reldir.toString().replace("\\", "/")).matches()) {
                     pathFilterMatches = false;
                 }
             }
@@ -219,19 +195,15 @@ public class SFTPTransfer implements FileTransfer {
                     }
 
                     // if is a directory and we're supposed to recurse
-                    if (recurse && entry.getAttrs().
-                            isDir()) {
+                    if (recurse && entry.getAttrs().isDir()) {
                         subDirs.add(entry);
                         return LsEntrySelector.CONTINUE;
                     }
 
                     // if is not a directory and is not a link and it matches
                     // FILE_FILTER_REGEX - then let's add it
-                    if (!entry.getAttrs().
-                            isDir() && !entry.getAttrs().
-                            isLink() && isPathMatch) {
-                        if (pattern == null || pattern.matcher(entryFilename).
-                                matches()) {
+                    if (!entry.getAttrs().isDir() && !entry.getAttrs().isLink() && isPathMatch) {
+                        if (pattern == null || pattern.matcher(entryFilename).matches()) {
                             listing.add(newFileInfo(entry, path));
                         }
                     }
@@ -245,8 +217,7 @@ public class SFTPTransfer implements FileTransfer {
 
             };
 
-            if (path == null || path.trim().
-                    isEmpty()) {
+            if (path == null || path.trim().isEmpty()) {
                 sftp.ls(".", filter);
             } else {
                 sftp.ls(path, filter);
@@ -258,8 +229,7 @@ public class SFTPTransfer implements FileTransfer {
         for (final LsEntry entry : subDirs) {
             final String entryFilename = entry.getFilename();
             final File newFullPath = new File(path, entryFilename);
-            final String newFullForwardPath = newFullPath.getPath().
-                    replace("\\", "/");
+            final String newFullForwardPath = newFullPath.getPath().replace("\\", "/");
 
             try {
                 getListing(newFullForwardPath, depth + 1, maxResults, listing);
@@ -275,29 +245,22 @@ public class SFTPTransfer implements FileTransfer {
             return null;
         }
         final File newFullPath = new File(path, entry.getFilename());
-        final String newFullForwardPath = newFullPath.getPath().
-                replace("\\", "/");
+        final String newFullForwardPath = newFullPath.getPath().replace("\\", "/");
 
-        String perms = entry.getAttrs().
-                getPermissionsString();
+        String perms = entry.getAttrs().getPermissionsString();
         if (perms.length() > 9) {
             perms = perms.substring(perms.length() - 9);
         }
 
         FileInfo.Builder builder = new FileInfo.Builder()
-                .filename(entry.getFilename()).
-                fullPathFileName(newFullForwardPath).
-                directory(entry.getAttrs().
-                        isDir()).
-                size(entry.getAttrs().
-                        getSize()).
-                lastModifiedTime(entry.getAttrs().
-                        getMTime() * 1000L).
-                permissions(perms).
-                owner(Integer.toString(entry.getAttrs().
-                                getUId())).
-                group(Integer.toString(entry.getAttrs().
-                                getGId()));
+                .filename(entry.getFilename())
+                .fullPathFileName(newFullForwardPath)
+                .directory(entry.getAttrs().isDir())
+                .size(entry.getAttrs().getSize())
+                .lastModifiedTime(entry.getAttrs().getMTime() * 1000L)
+                .permissions(perms)
+                .owner(Integer.toString(entry.getAttrs().getUId()))
+                .group(Integer.toString(entry.getAttrs().getGId()));
         return builder.build();
     }
 
@@ -318,9 +281,7 @@ public class SFTPTransfer implements FileTransfer {
 
     @Override
     public void deleteFile(final String path, final String remoteFileName) throws IOException {
-        final String fullPath = (path == null)
-                ? remoteFileName
-                : (path.endsWith("/")) ? path + remoteFileName : path + "/" + remoteFileName;
+        final String fullPath = (path == null) ? remoteFileName : (path.endsWith("/")) ? path + remoteFileName : path + "/" + remoteFileName;
         try {
             sftp.rm(fullPath);
         } catch (final SftpException e) {
@@ -340,9 +301,7 @@ public class SFTPTransfer implements FileTransfer {
     @Override
     public void ensureDirectoryExists(final FlowFile flowFile, final File directoryName) throws IOException {
         final ChannelSftp channel = getChannel(flowFile);
-        final String remoteDirectory = directoryName.getAbsolutePath().
-                replace("\\", "/").
-                replaceAll("^.\\:", "");
+        final String remoteDirectory = directoryName.getAbsolutePath().replace("\\", "/").replaceAll("^.\\:", "");
 
         // if we disable the directory listing, we just want to blindly perform the mkdir command,
         // eating any exceptions thrown (like if the directory already exists).
@@ -374,13 +333,10 @@ public class SFTPTransfer implements FileTransfer {
 
         if (!exists) {
             // first ensure parent directories exist before creating this one
-            if (directoryName.getParent() != null && !directoryName.
-                    getParentFile().
-                    equals(new File(File.separator))) {
+            if (directoryName.getParent() != null && !directoryName.getParentFile().equals(new File(File.separator))) {
                 ensureDirectoryExists(flowFile, directoryName.getParentFile());
             }
-            logger.
-                    debug("Remote Directory {} does not exist; creating it", new Object[]{remoteDirectory});
+            logger.debug("Remote Directory {} does not exist; creating it", new Object[]{remoteDirectory});
             try {
                 channel.mkdir(remoteDirectory);
                 logger.debug("Created {}", new Object[]{remoteDirectory});
@@ -393,9 +349,7 @@ public class SFTPTransfer implements FileTransfer {
     private ChannelSftp getChannel(final FlowFile flowFile) throws IOException {
         if (sftp != null) {
             String sessionhost = session.getHost();
-            String desthost = ctx.getProperty(HOSTNAME).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
+            String desthost = ctx.getProperty(HOSTNAME).evaluateAttributeExpressions(flowFile).getValue();
             if (sessionhost.equals(desthost)) {
                 // destination matches so we can keep our current session
                 return sftp;
@@ -407,35 +361,22 @@ public class SFTPTransfer implements FileTransfer {
 
         final JSch jsch = new JSch();
         try {
-            final Session session = jsch.getSession(ctx.getProperty(USERNAME).
-                    getValue(),
-                    ctx.getProperty(HOSTNAME).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue(),
-                    ctx.getProperty(PORT).
-                    evaluateAttributeExpressions(flowFile).
-                    asInteger().
-                    intValue());
-
-            final String hostKeyVal = ctx.getProperty(HOST_KEY_FILE).
-                    getValue();
+            final Session session = jsch.getSession(ctx.getProperty(USERNAME).getValue(),
+                    ctx.getProperty(HOSTNAME).evaluateAttributeExpressions(flowFile).getValue(),
+                    ctx.getProperty(PORT).evaluateAttributeExpressions(flowFile).asInteger().intValue());
+
+            final String hostKeyVal = ctx.getProperty(HOST_KEY_FILE).getValue();
             if (hostKeyVal != null) {
                 jsch.setKnownHosts(hostKeyVal);
             }
 
             final Properties properties = new Properties();
-            properties.setProperty("StrictHostKeyChecking", ctx.
-                    getProperty(STRICT_HOST_KEY_CHECKING).
-                    asBoolean() ? "yes" : "no");
-            properties.
-                    setProperty("PreferredAuthentications", "publickey,password");
-
-            if (ctx.getProperty(FileTransfer.USE_COMPRESSION).
-                    asBoolean()) {
-                properties.
-                        setProperty("compression.s2c", "zlib@openssh.com,zlib,none");
-                properties.
-                        setProperty("compression.c2s", "zlib@openssh.com,zlib,none");
+            properties.setProperty("StrictHostKeyChecking", ctx.getProperty(STRICT_HOST_KEY_CHECKING).asBoolean() ? "yes" : "no");
+            properties.setProperty("PreferredAuthentications", "publickey,password");
+
+            if (ctx.getProperty(FileTransfer.USE_COMPRESSION).asBoolean()) {
+                properties.setProperty("compression.s2c", "zlib@openssh.com,zlib,none");
+                properties.setProperty("compression.c2s", "zlib@openssh.com,zlib,none");
             } else {
                 properties.setProperty("compression.s2c", "none");
                 properties.setProperty("compression.c2s", "none");
@@ -443,42 +384,32 @@ public class SFTPTransfer implements FileTransfer {
 
             session.setConfig(properties);
 
-            final String privateKeyFile = ctx.getProperty(PRIVATE_KEY_PATH).
-                    getValue();
+            final String privateKeyFile = ctx.getProperty(PRIVATE_KEY_PATH).getValue();
             if (privateKeyFile != null) {
-                jsch.addIdentity(privateKeyFile, ctx.
-                        getProperty(PRIVATE_KEY_PASSPHRASE).
-                        getValue());
+                jsch.addIdentity(privateKeyFile, ctx.getProperty(PRIVATE_KEY_PASSPHRASE).getValue());
             }
 
-            final String password = ctx.getProperty(FileTransfer.PASSWORD).
-                    getValue();
+            final String password = ctx.getProperty(FileTransfer.PASSWORD).getValue();
             if (password != null) {
                 session.setPassword(password);
             }
 
-            session.setTimeout(ctx.getProperty(FileTransfer.CONNECTION_TIMEOUT).
-                    asTimePeriod(TimeUnit.MILLISECONDS).
-                    intValue());
+            session.setTimeout(ctx.getProperty(FileTransfer.CONNECTION_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
             session.connect();
             this.session = session;
             this.closed = false;
 
             sftp = (ChannelSftp) session.openChannel("sftp");
             sftp.connect();
-            session.setTimeout(ctx.getProperty(FileTransfer.DATA_TIMEOUT).
-                    asTimePeriod(TimeUnit.MILLISECONDS).
-                    intValue());
-            if (!ctx.getProperty(USE_KEEPALIVE_ON_TIMEOUT).
-                    asBoolean()) {
+            session.setTimeout(ctx.getProperty(FileTransfer.DATA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
+            if (!ctx.getProperty(USE_KEEPALIVE_ON_TIMEOUT).asBoolean()) {
                 session.setServerAliveCountMax(0); // do not send keepalive message on SocketTimeoutException
             }
             this.homeDir = sftp.getHome();
             return sftp;
 
         } catch (final SftpException | JSchException e) {
-            throw new IOException("Failed to obtain connection to remote host due to " + e.
-                    toString(), e);
+            throw new IOException("Failed to obtain connection to remote host due to " + e.toString(), e);
         }
     }
 
@@ -500,9 +431,7 @@ public class SFTPTransfer implements FileTransfer {
                 sftp.exit();
             }
         } catch (final Exception ex) {
-            logger.
-                    warn("Failed to close ChannelSftp due to {}", new Object[]{ex.
-                        toString()}, ex);
+            logger.warn("Failed to close ChannelSftp due to {}", new Object[]{ex.toString()}, ex);
         }
         sftp = null;
 
@@ -511,8 +440,7 @@ public class SFTPTransfer implements FileTransfer {
                 session.disconnect();
             }
         } catch (final Exception ex) {
-            logger.warn("Failed to close session due to {}", new Object[]{ex.
-                toString()}, ex);
+            logger.warn("Failed to close session due to {}", new Object[]{ex.toString()}, ex);
         }
         session = null;
     }
@@ -552,8 +480,7 @@ public class SFTPTransfer implements FileTransfer {
 
         LsEntry matchingEntry = null;
         for (final LsEntry entry : vector) {
-            if (entry.getFilename().
-                    equalsIgnoreCase(filename)) {
+            if (entry.getFilename().equalsIgnoreCase(filename)) {
                 matchingEntry = entry;
                 break;
             }
@@ -567,22 +494,15 @@ public class SFTPTransfer implements FileTransfer {
         final ChannelSftp sftp = getChannel(flowFile);
 
         // destination path + filename
-        final String fullPath = (path == null)
-                ? filename
-                : (path.endsWith("/")) ? path + filename : path + "/" + filename;
+        final String fullPath = (path == null) ? filename : (path.endsWith("/")) ? path + filename : path + "/" + filename;
 
         // temporary path + filename
-        String tempFilename = ctx.getProperty(TEMP_FILENAME).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
+        String tempFilename = ctx.getProperty(TEMP_FILENAME).evaluateAttributeExpressions(flowFile).getValue();
         if (tempFilename == null) {
-            final boolean dotRename = ctx.getProperty(DOT_RENAME).
-                    asBoolean();
+            final boolean dotRename = ctx.getProperty(DOT_RENAME).asBoolean();
             tempFilename = dotRename ? "." + filename : filename;
         }
-        final String tempPath = (path == null)
-                ? tempFilename
-                : (path.endsWith("/")) ? path + tempFilename : path + "/" + tempFilename;
+        final String tempPath = (path == null) ? tempFilename : (path.endsWith("/")) ? path + tempFilename : path + "/" + tempFilename;
 
         try {
             sftp.put(content, tempPath);
@@ -590,61 +510,45 @@ public class SFTPTransfer implements FileTransfer {
             throw new IOException("Unable to put content to " + fullPath + " due to " + e, e);
         }
 
-        final String lastModifiedTime = ctx.getProperty(LAST_MODIFIED_TIME).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
-        if (lastModifiedTime != null && !lastModifiedTime.trim().
-                isEmpty()) {
+        final String lastModifiedTime = ctx.getProperty(LAST_MODIFIED_TIME).evaluateAttributeExpressions(flowFile).getValue();
+        if (lastModifiedTime != null && !lastModifiedTime.trim().isEmpty()) {
             try {
                 final DateFormat formatter = new SimpleDateFormat(FILE_MODIFY_DATE_ATTR_FORMAT, Locale.US);
                 final Date fileModifyTime = formatter.parse(lastModifiedTime);
                 int time = (int) (fileModifyTime.getTime() / 1000L);
                 sftp.setMtime(tempPath, time);
             } catch (final Exception e) {
-                logger.
-                        error("Failed to set lastModifiedTime on {} to {} due to {}", new Object[]{tempPath, lastModifiedTime, e});
+                logger.error("Failed to set lastModifiedTime on {} to {} due to {}", new Object[]{tempPath, lastModifiedTime, e});
             }
         }
 
-        final String permissions = ctx.getProperty(PERMISSIONS).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
-        if (permissions != null && !permissions.trim().
-                isEmpty()) {
+        final String permissions = ctx.getProperty(PERMISSIONS).evaluateAttributeExpressions(flowFile).getValue();
+        if (permissions != null && !permissions.trim().isEmpty()) {
             try {
                 int perms = numberPermissions(permissions);
                 if (perms >= 0) {
                     sftp.chmod(perms, tempPath);
                 }
             } catch (final Exception e) {
-                logger.
-                        error("Failed to set permission on {} to {} due to {}", new Object[]{tempPath, permissions, e});
+                logger.error("Failed to set permission on {} to {} due to {}", new Object[]{tempPath, permissions, e});
             }
         }
 
-        final String owner = ctx.getProperty(REMOTE_OWNER).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
-        if (owner != null && !owner.trim().
-                isEmpty()) {
+        final String owner = ctx.getProperty(REMOTE_OWNER).evaluateAttributeExpressions(flowFile).getValue();
+        if (owner != null && !owner.trim().isEmpty()) {
             try {
                 sftp.chown(Integer.parseInt(owner), tempPath);
             } catch (final Exception e) {
-                logger.
-                        error("Failed to set owner on {} to {} due to {}", new Object[]{tempPath, owner, e});
+                logger.error("Failed to set owner on {} to {} due to {}", new Object[]{tempPath, owner, e});
             }
         }
 
-        final String group = ctx.getProperty(REMOTE_GROUP).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
-        if (group != null && !group.trim().
-                isEmpty()) {
+        final String group = ctx.getProperty(REMOTE_GROUP).evaluateAttributeExpressions(flowFile).getValue();
+        if (group != null && !group.trim().isEmpty()) {
             try {
                 sftp.chgrp(Integer.parseInt(group), tempPath);
             } catch (final Exception e) {
-                logger.
-                        error("Failed to set group on {} to {} due to {}", new Object[]{tempPath, group, e});
+                logger.error("Failed to set group on {} to {} due to {}", new Object[]{tempPath, group, e});
             }
         }
 
@@ -668,8 +572,7 @@ public class SFTPTransfer implements FileTransfer {
         int number = -1;
         final Pattern rwxPattern = Pattern.compile("^[rwx-]{9}$");
         final Pattern numPattern = Pattern.compile("\\d+");
-        if (rwxPattern.matcher(perms).
-                matches()) {
+        if (rwxPattern.matcher(perms).matches()) {
             number = 0;
             if (perms.charAt(0) == 'r') {
                 number |= 0x100;
@@ -698,8 +601,7 @@ public class SFTPTransfer implements FileTransfer {
             if (perms.charAt(8) == 'x') {
                 number |= 0x1;
             }
-        } else if (numPattern.matcher(perms).
-                matches()) {
+        } else if (numPattern.matcher(perms).matches()) {
             try {
                 number = Integer.parseInt(perms, 8);
             } catch (NumberFormatException ignore) {
@@ -717,8 +619,7 @@ public class SFTPTransfer implements FileTransfer {
 
             @Override
             public void log(int level, String message) {
-                LoggerFactory.getLogger(SFTPTransfer.class).
-                        debug("SFTP Log: {}", message);
+                LoggerFactory.getLogger(SFTPTransfer.class).debug("SFTP Log: {}", message);
             }
         });
     }

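The numberPermissions() hunks above convert a permissions string into a POSIX mode: a nine-character "rwx-" string is mapped bit by bit, while a purely numeric string is parsed as octal. A self-contained sketch of that conversion (the class and method names are illustrative, not part of this commit):

    import java.util.regex.Pattern;

    public class PermissionStringExample {

        // Mirrors the logic shown in the diff: "rwxr-xr--" -> 0754, "644" -> 0644, anything else -> -1.
        static int toMode(final String perms) {
            final Pattern rwxPattern = Pattern.compile("^[rwx-]{9}$");
            final Pattern numPattern = Pattern.compile("\\d+");
            if (rwxPattern.matcher(perms).matches()) {
                final char[] expected = {'r', 'w', 'x', 'r', 'w', 'x', 'r', 'w', 'x'};
                final int[] bits = {0x100, 0x80, 0x40, 0x20, 0x10, 0x8, 0x4, 0x2, 0x1};
                int number = 0;
                for (int i = 0; i < 9; i++) {
                    if (perms.charAt(i) == expected[i]) {
                        number |= bits[i];
                    }
                }
                return number;
            } else if (numPattern.matcher(perms).matches()) {
                try {
                    return Integer.parseInt(perms, 8);
                } catch (final NumberFormatException ignore) {
                }
            }
            return -1;
        }

        public static void main(final String[] args) {
            System.out.println(Integer.toOctalString(toMode("rwxr-xr--"))); // prints 754
            System.out.println(Integer.toOctalString(toMode("644")));       // prints 644
        }
    }
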
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPUtils.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPUtils.java
index 9121089..fc6275f 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPUtils.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPUtils.java
@@ -40,80 +40,80 @@ import com.jcraft.jsch.SftpException;
 
 public class SFTPUtils {
 
-    public static final PropertyDescriptor SFTP_PRIVATEKEY_PATH = new PropertyDescriptor.Builder().
-            required(false).
-            description("sftp.privatekey.path").
-            defaultValue(null).
-            name("sftp.privatekey.path").
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            sensitive(false).
-            build();
-    public static final PropertyDescriptor REMOTE_PASSWORD = new PropertyDescriptor.Builder().
-            required(false).
-            description("remote.password").
-            defaultValue(null).
-            name("remote.password").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            sensitive(true).
-            build();
-    public static final PropertyDescriptor SFTP_PRIVATEKEY_PASSPHRASE = new PropertyDescriptor.Builder().
-            required(false).
-            description("sftp.privatekey.passphrase").
-            defaultValue(null).
-            name("sftp.privatekey.passphrase").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            sensitive(true).
-            build();
-    public static final PropertyDescriptor SFTP_PORT = new PropertyDescriptor.Builder().
-            required(false).
-            description("sftp.port").
-            defaultValue(null).
-            name("sftp.port").
-            addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR).
-            sensitive(false).
-            build();
-    public static final PropertyDescriptor NETWORK_DATA_TIMEOUT = new PropertyDescriptor.Builder().
-            required(false).
-            description("network.data.timeout").
-            defaultValue(null).
-            name("network.data.timeout").
-            addValidator(StandardValidators.INTEGER_VALIDATOR).
-            sensitive(false).
-            build();
-    public static final PropertyDescriptor SFTP_HOSTKEY_FILENAME = new PropertyDescriptor.Builder().
-            required(false).
-            description("sftp.hostkey.filename").
-            defaultValue(null).
-            name("sftp.hostkey.filename").
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            sensitive(false).
-            build();
-    public static final PropertyDescriptor NETWORK_CONNECTION_TIMEOUT = new PropertyDescriptor.Builder().
-            required(false).
-            description("network.connection.timeout").
-            defaultValue(null).
-            name("network.connection.timeout").
-            addValidator(StandardValidators.INTEGER_VALIDATOR).
-            sensitive(false).
-            build();
+    public static final PropertyDescriptor SFTP_PRIVATEKEY_PATH = new PropertyDescriptor.Builder()
+            .required(false)
+            .description("sftp.privatekey.path")
+            .defaultValue(null)
+            .name("sftp.privatekey.path")
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .sensitive(false)
+            .build();
+    public static final PropertyDescriptor REMOTE_PASSWORD = new PropertyDescriptor.Builder()
+            .required(false)
+            .description("remote.password")
+            .defaultValue(null)
+            .name("remote.password")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .sensitive(true)
+            .build();
+    public static final PropertyDescriptor SFTP_PRIVATEKEY_PASSPHRASE = new PropertyDescriptor.Builder()
+            .required(false)
+            .description("sftp.privatekey.passphrase")
+            .defaultValue(null)
+            .name("sftp.privatekey.passphrase")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .sensitive(true)
+            .build();
+    public static final PropertyDescriptor SFTP_PORT = new PropertyDescriptor.Builder()
+            .required(false)
+            .description("sftp.port")
+            .defaultValue(null)
+            .name("sftp.port")
+            .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
+            .sensitive(false)
+            .build();
+    public static final PropertyDescriptor NETWORK_DATA_TIMEOUT = new PropertyDescriptor.Builder()
+            .required(false)
+            .description("network.data.timeout")
+            .defaultValue(null)
+            .name("network.data.timeout")
+            .addValidator(StandardValidators.INTEGER_VALIDATOR)
+            .sensitive(false)
+            .build();
+    public static final PropertyDescriptor SFTP_HOSTKEY_FILENAME = new PropertyDescriptor.Builder()
+            .required(false)
+            .description("sftp.hostkey.filename")
+            .defaultValue(null)
+            .name("sftp.hostkey.filename")
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .sensitive(false)
+            .build();
+    public static final PropertyDescriptor NETWORK_CONNECTION_TIMEOUT = new PropertyDescriptor.Builder()
+            .required(false)
+            .description("network.connection.timeout")
+            .defaultValue(null)
+            .name("network.connection.timeout")
+            .addValidator(StandardValidators.INTEGER_VALIDATOR)
+            .sensitive(false)
+            .build();
 
     // required properties
-    public static final PropertyDescriptor REMOTE_HOSTNAME = new PropertyDescriptor.Builder().
-            required(true).
-            description("remote.hostname").
-            defaultValue(null).
-            name("remote.hostname").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            sensitive(false).
-            build();
-    public static final PropertyDescriptor REMOTE_USERNAME = new PropertyDescriptor.Builder().
-            required(true).
-            description("remote.username").
-            defaultValue(null).
-            name("remote.username").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            sensitive(false).
-            build();
+    public static final PropertyDescriptor REMOTE_HOSTNAME = new PropertyDescriptor.Builder()
+            .required(true)
+            .description("remote.hostname")
+            .defaultValue(null)
+            .name("remote.hostname")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .sensitive(false)
+            .build();
+    public static final PropertyDescriptor REMOTE_USERNAME = new PropertyDescriptor.Builder()
+            .required(true)
+            .description("remote.username")
+            .defaultValue(null)
+            .name("remote.username")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .sensitive(false)
+            .build();
 
     private static final List<PropertyDescriptor> propertyDescriptors = new ArrayList<>();
 
@@ -149,22 +149,16 @@ public class SFTPUtils {
         File dir = new File(dirPath);
         String currentWorkingDirectory = null;
         boolean dirExists = false;
-        final String forwardPaths = dir.getPath().
-                replaceAll(Matcher.quoteReplacement("\\"), Matcher.
-                        quoteReplacement("/"));
+        final String forwardPaths = dir.getPath().replaceAll(Matcher.quoteReplacement("\\"), Matcher.quoteReplacement("/"));
         try {
             currentWorkingDirectory = sftp.pwd();
-            logger.
-                    debug(proc + " attempting to change directory from " + currentWorkingDirectory + " to " + dir.
-                            getPath());
+            logger.debug(proc + " attempting to change directory from " + currentWorkingDirectory + " to " + dir.getPath());
             //always use forward paths for long string attempt
             sftp.cd(forwardPaths);
             dirExists = true;
-            logger.
-                    debug(proc + " changed working directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "'");
+            logger.debug(proc + " changed working directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "'");
         } catch (final SftpException sftpe) {
-            logger.
-                    debug(proc + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way.");
+            logger.debug(proc + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way.");
         }
         if (dirExists) {
             return;
@@ -185,14 +179,12 @@ public class SFTPUtils {
             try {
                 sftp.cd(dirName);
             } catch (final SftpException sftpe) {
-                logger.
-                        debug(proc + " creating new directory and changing to it " + dirName);
+                logger.debug(proc + " creating new directory and changing to it " + dirName);
                 try {
                     sftp.mkdir(dirName);
                     sftp.cd(dirName);
                 } catch (final SftpException e) {
-                    throw new IOException(proc + " could not make/change directory to [" + dirName + "] [" + e.
-                            getLocalizedMessage() + "]", e);
+                    throw new IOException(proc + " could not make/change directory to [" + dirName + "] [" + e.getLocalizedMessage() + "]", e);
                 }
             }
         }
@@ -205,8 +197,7 @@ public class SFTPUtils {
 
         final Hashtable<String, String> newOptions = new Hashtable<>();
 
-        Session session = jsch.
-                getSession(conf.username, conf.hostname, conf.port);
+        Session session = jsch.getSession(conf.username, conf.hostname, conf.port);
 
         final String hostKeyVal = conf.hostkeyFile;
 

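The changeWorkingDirectory hunks above first attempt a single cd to the full forward-slash path and, when that fails, fall back to walking the path one segment at a time, creating any directory that is missing. A minimal sketch of that fallback, assuming only JSch's ChannelSftp cd/mkdir API; the helper name and its standalone form are illustrative:

    import java.io.IOException;

    import com.jcraft.jsch.ChannelSftp;
    import com.jcraft.jsch.SftpException;

    public class SftpMkdirWalkExample {

        static void cdCreatingAsNeeded(final ChannelSftp sftp, final String forwardPath) throws IOException {
            try {
                if (forwardPath.startsWith("/")) {
                    sftp.cd("/");                   // anchor absolute paths at the root
                }
            } catch (final SftpException e) {
                throw new IOException("could not change to root directory", e);
            }
            for (final String dirName : forwardPath.split("/")) {
                if (dirName.isEmpty()) {
                    continue;                       // a leading '/' yields an empty first segment
                }
                try {
                    sftp.cd(dirName);               // relative to the current working directory
                } catch (final SftpException cdFailed) {
                    try {
                        sftp.mkdir(dirName);        // create the missing segment, then descend into it
                        sftp.cd(dirName);
                    } catch (final SftpException e) {
                        throw new IOException("could not make/change directory to [" + dirName + "]", e);
                    }
                }
            }
        }
    }
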
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/UDPStreamConsumer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/UDPStreamConsumer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/UDPStreamConsumer.java
index 84f431d..ad2cca5 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/UDPStreamConsumer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/UDPStreamConsumer.java
@@ -109,8 +109,7 @@ public class UDPStreamConsumer implements StreamConsumer {
             }
             // time to make a new flow file
             newFlowFile = session.create();
-            newFlowFile = session.
-                    putAttribute(newFlowFile, "source.stream.identifier", uniqueId);
+            newFlowFile = session.putAttribute(newFlowFile, "source.stream.identifier", uniqueId);
             newFlowFile = session.write(newFlowFile, udpCallback);
             if (newFlowFile.getSize() == 0) {
                 session.remove(newFlowFile);
@@ -123,8 +122,7 @@ public class UDPStreamConsumer implements StreamConsumer {
                 try {
                     session.remove(newFlowFile);
                 } catch (final Exception ex2) {
-                    logger.
-                            warn("Unable to delete partial flow file due to: ", ex2);
+                    logger.warn("Unable to delete partial flow file due to: ", ex2);
                 }
             }
             throw new IOException("Problem while processing data stream", ex);
@@ -158,21 +156,17 @@ public class UDPStreamConsumer implements StreamConsumer {
             return false;
         }
         UDPStreamConsumer rhs = (UDPStreamConsumer) obj;
-        return new EqualsBuilder().appendSuper(super.equals(obj)).
-                append(uniqueId, rhs.uniqueId).
-                isEquals();
+        return new EqualsBuilder().appendSuper(super.equals(obj)).append(uniqueId, rhs.uniqueId).isEquals();
     }
 
     @Override
     public final int hashCode() {
-        return new HashCodeBuilder(17, 37).append(uniqueId).
-                toHashCode();
+        return new HashCodeBuilder(17, 37).append(uniqueId).toHashCode();
     }
 
     @Override
     public final String toString() {
-        return new ToStringBuilder(this).append(uniqueId).
-                toString();
+        return new ToStringBuilder(this).append(uniqueId).toString();
     }
 
     public static final class UDPConsumerCallback implements OutputStreamCallback {
@@ -194,11 +188,9 @@ public class UDPStreamConsumer implements StreamConsumer {
         public void process(final OutputStream out) throws IOException {
             try {
                 long totalBytes = 0L;
-                try (WritableByteChannel wbc = Channels.
-                        newChannel(new BufferedOutputStream(out))) {
+                try (WritableByteChannel wbc = Channels.newChannel(new BufferedOutputStream(out))) {
                     ByteBuffer buffer = null;
-                    while ((buffer = filledBuffers.
-                            poll(50, TimeUnit.MILLISECONDS)) != null) {
+                    while ((buffer = filledBuffers.poll(50, TimeUnit.MILLISECONDS)) != null) {
                         int bytesWrittenThisPass = 0;
                         try {
                             while (buffer.hasRemaining()) {
@@ -209,8 +201,7 @@ public class UDPStreamConsumer implements StreamConsumer {
                                 break;// this is enough data
                             }
                         } finally {
-                            bufferPool.
-                                    returnBuffer(buffer, bytesWrittenThisPass);
+                            bufferPool.returnBuffer(buffer, bytesWrittenThisPass);
                         }
                     }
                 }

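The UDPConsumerCallback hunks above drain pooled ByteBuffers from a queue into the FlowFile's OutputStream through an NIO channel, returning each buffer to the pool once written. A self-contained sketch of that drain loop using only JDK types (the queue and its contents stand in for NiFi's BufferPool and are illustrative):

    import java.io.BufferedOutputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.Channels;
    import java.nio.channels.WritableByteChannel;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.TimeUnit;

    public class DrainBuffersExample {
        public static void main(final String[] args) throws IOException, InterruptedException {
            final BlockingQueue<ByteBuffer> filledBuffers = new LinkedBlockingQueue<>();
            filledBuffers.add(ByteBuffer.wrap("datagram-1 ".getBytes(StandardCharsets.UTF_8)));
            filledBuffers.add(ByteBuffer.wrap("datagram-2".getBytes(StandardCharsets.UTF_8)));

            final ByteArrayOutputStream out = new ByteArrayOutputStream();
            long totalBytes = 0L;
            try (WritableByteChannel wbc = Channels.newChannel(new BufferedOutputStream(out))) {
                ByteBuffer buffer;
                // Poll with a short timeout so the loop ends once the queue stays empty.
                while ((buffer = filledBuffers.poll(50, TimeUnit.MILLISECONDS)) != null) {
                    while (buffer.hasRemaining()) {
                        totalBytes += wbc.write(buffer);
                    }
                    // In the processor this is the point where the buffer is returned to the pool.
                }
            }
            System.out.println(totalBytes + " bytes written: " + out.toString("UTF-8"));
        }
    }
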
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase32InputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase32InputStream.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase32InputStream.java
index 692947d..711efce 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase32InputStream.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase32InputStream.java
@@ -23,8 +23,7 @@ import java.util.Arrays;
 import org.apache.commons.codec.binary.Base32;
 
 /**
- * An InputStream that throws an IOException if any byte is read that is not a
- * valid Base32 character. Whitespace is considered valid.
+ * An InputStream that throws an IOException if any byte is read that is not a valid Base32 character. Whitespace is considered valid.
  */
 public class ValidatingBase32InputStream extends FilterInputStream {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase64InputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase64InputStream.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase64InputStream.java
index 6867681..5002906 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase64InputStream.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/ValidatingBase64InputStream.java
@@ -24,8 +24,7 @@ import java.util.Arrays;
 import org.apache.commons.codec.binary.Base64;
 
 /**
- * An InputStream that throws an IOException if any byte is read that is not a
- * valid Base64 character. Whitespace is considered valid.
+ * An InputStream that throws an IOException if any byte is read that is not a valid Base64 character. Whitespace is considered valid.
  */
 public class ValidatingBase64InputStream extends FilterInputStream {
 

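Per the Javadoc above, ValidatingBase64InputStream fails fast on any byte outside the Base64 alphabet, with whitespace tolerated. A hedged usage sketch, assuming the class wraps another stream through a single InputStream constructor in the usual FilterInputStream style (that constructor signature is an assumption, not shown in this hunk):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.processors.standard.util.ValidatingBase64InputStream;

    public class ValidatingBase64Example {
        public static void main(final String[] args) throws IOException {
            final byte[] notBase64 = "this is not base64!".getBytes(StandardCharsets.UTF_8);
            try (final InputStream in = new ValidatingBase64InputStream(new ByteArrayInputStream(notBase64))) {
                final byte[] buffer = new byte[64];
                while (in.read(buffer) != -1) {
                    // keep reading; the '!' byte should trigger the IOException described above
                }
            } catch (final IOException expected) {
                System.out.println("rejected non-Base64 input: " + expected.getMessage());
            }
        }
    }
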
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageConsumer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageConsumer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageConsumer.java
index 7d16b73..fca6a70 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageConsumer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageConsumer.java
@@ -55,22 +55,19 @@ public class WrappedMessageConsumer {
         try {
             connection.close();
         } catch (final JMSException e) {
-            logger.
-                    warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
+            logger.warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
         }
 
         try {
             session.close();
         } catch (final JMSException e) {
-            logger.
-                    warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
+            logger.warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
         }
 
         try {
             consumer.close();
         } catch (final JMSException e) {
-            logger.
-                    warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
+            logger.warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageProducer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageProducer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageProducer.java
index a2d7459..fc01b02 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageProducer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/WrappedMessageProducer.java
@@ -55,22 +55,19 @@ public class WrappedMessageProducer {
         try {
             connection.close();
         } catch (final JMSException e) {
-            logger.
-                    warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
+            logger.warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
         }
 
         try {
             session.close();
         } catch (final JMSException e) {
-            logger.
-                    warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
+            logger.warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
         }
 
         try {
             producer.close();
         } catch (final JMSException e) {
-            logger.
-                    warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
+            logger.warn("unable to close connection to JMS Server due to {}; resources may not be cleaned up appropriately", e);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/TestIngestAndUpdate.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/TestIngestAndUpdate.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/TestIngestAndUpdate.java
index 70f2579..c9ed9f9 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/TestIngestAndUpdate.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/TestIngestAndUpdate.java
@@ -21,8 +21,7 @@ public class TestIngestAndUpdate {
 
     public static void main(String[] args) throws IOException {
         byte[] bytes = new byte[1024];
-        System.out.write(System.getProperty("user.dir").
-                getBytes());
+        System.out.write(System.getProperty("user.dir").getBytes());
         System.out.println(":ModifiedResult");
         int numRead = 0;
         while ((numRead = System.in.read(bytes)) != -1) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/CaptureServlet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/CaptureServlet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/CaptureServlet.java
index 772ca0b..d6c87d6 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/CaptureServlet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/CaptureServlet.java
@@ -48,8 +48,7 @@ public class CaptureServlet extends HttpServlet {
 
     @Override
     protected void doHead(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
-        response.
-                setHeader("Accept", "application/flowfile-v3,application/flowfile-v2");
+        response.setHeader("Accept", "application/flowfile-v3,application/flowfile-v2");
         response.setHeader("x-nifi-transfer-protocol-version", "1");
         response.setHeader("Accept-Encoding", "gzip");
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/RESTServiceContentModified.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/RESTServiceContentModified.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/RESTServiceContentModified.java
index 580450f..ec3211c 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/RESTServiceContentModified.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/RESTServiceContentModified.java
@@ -47,11 +47,9 @@ public class RESTServiceContentModified extends HttpServlet {
         dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
 
         response.setContentType("application/json");
-        if (ifNoneMatch != null && ifNoneMatch.length() > 0 && !IGNORE_ETAG && Integer.
-                parseInt(ifNoneMatch) == ETAG) {
+        if (ifNoneMatch != null && ifNoneMatch.length() > 0 && !IGNORE_ETAG && Integer.parseInt(ifNoneMatch) == ETAG) {
             response.setStatus(304);
-            response.setHeader("Last-Modified", dateFormat.
-                    format(modificationDate));
+            response.setHeader("Last-Modified", dateFormat.format(modificationDate));
             response.setHeader("ETag", Integer.toString(ETAG));
             return;
         }
@@ -59,16 +57,14 @@ public class RESTServiceContentModified extends HttpServlet {
         long date = -1;
         if (ifModifiedSince != null && ifModifiedSince.length() > 0 && !IGNORE_LAST_MODIFIED) {
             try {
-                date = dateFormat.parse(ifModifiedSince).
-                        getTime();
+                date = dateFormat.parse(ifModifiedSince).getTime();
             } catch (Exception e) {
 
             }
         }
         if (date >= modificationDate) {
             response.setStatus(304);
-            response.setHeader("Last-Modified", dateFormat.
-                    format(modificationDate));
+            response.setHeader("Last-Modified", dateFormat.format(modificationDate));
             response.setHeader("ETag", Integer.toString(ETAG));
             return;
         }
@@ -76,8 +72,7 @@ public class RESTServiceContentModified extends HttpServlet {
         response.setStatus(200);
         response.setHeader("Last-Modified", dateFormat.format(modificationDate));
         response.setHeader("ETag", Integer.toString(ETAG));
-        response.getOutputStream().
-                println(result);
+        response.getOutputStream().println(result);
     }
 
 }

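[editor's note] RESTServiceContentModified above implements standard HTTP conditional-GET semantics: it answers 304 Not Modified when the If-None-Match header matches the servlet's fixed ETag or when the If-Modified-Since date is not older than the stored modification date, and otherwise returns 200 with fresh headers. A minimal sketch of how a client might exercise that behavior; the URL, ETag value, and date below are assumptions for illustration, not values taken from the test:

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    // Hypothetical client; endpoint and header values are illustrative only.
    public class ConditionalGetSketch {
        public static void main(String[] args) throws IOException {
            final URL url = new URL("http://localhost:8080/modified");
            final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            // Echo back the ETag and Last-Modified values from a previous response.
            conn.setRequestProperty("If-None-Match", "4");
            conn.setRequestProperty("If-Modified-Since", "Sun, 08 Feb 2015 03:30:04 GMT");

            final int status = conn.getResponseCode();
            if (status == HttpURLConnection.HTTP_NOT_MODIFIED) {
                // 304: reuse the cached body; Last-Modified and ETag are still present in the headers.
                System.out.println("Not modified; ETag=" + conn.getHeaderField("ETag"));
            } else {
                System.out.println("Fresh content; status=" + status);
            }
            conn.disconnect();
        }
    }
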
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestBase64EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestBase64EncodeContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestBase64EncodeContent.java
index 4005db7..eef4dbc 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestBase64EncodeContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestBase64EncodeContent.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.Base64EncodeContent;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Paths;
@@ -31,68 +30,53 @@ public class TestBase64EncodeContent {
 
     @Test
     public void testRoundTrip() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new Base64EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new Base64EncodeContent());
 
-        testRunner.
-                setProperty(Base64EncodeContent.MODE, Base64EncodeContent.ENCODE_MODE);
+        testRunner.setProperty(Base64EncodeContent.MODE, Base64EncodeContent.ENCODE_MODE);
 
         testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
         testRunner.clearTransferState();
         testRunner.run();
 
-        testRunner.
-                assertAllFlowFilesTransferred(Base64EncodeContent.REL_SUCCESS, 1);
+        testRunner.assertAllFlowFilesTransferred(Base64EncodeContent.REL_SUCCESS, 1);
 
-        MockFlowFile flowFile = testRunner.
-                getFlowFilesForRelationship(Base64EncodeContent.REL_SUCCESS).
-                get(0);
+        MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(Base64EncodeContent.REL_SUCCESS).get(0);
         testRunner.assertQueueEmpty();
 
-        testRunner.
-                setProperty(Base64EncodeContent.MODE, Base64EncodeContent.DECODE_MODE);
+        testRunner.setProperty(Base64EncodeContent.MODE, Base64EncodeContent.DECODE_MODE);
         testRunner.enqueue(flowFile);
         testRunner.clearTransferState();
         testRunner.run();
-        testRunner.
-                assertAllFlowFilesTransferred(Base64EncodeContent.REL_SUCCESS, 1);
+        testRunner.assertAllFlowFilesTransferred(Base64EncodeContent.REL_SUCCESS, 1);
 
-        flowFile = testRunner.
-                getFlowFilesForRelationship(Base64EncodeContent.REL_SUCCESS).
-                get(0);
+        flowFile = testRunner.getFlowFilesForRelationship(Base64EncodeContent.REL_SUCCESS).get(0);
         flowFile.assertContentEquals(new File("src/test/resources/hello.txt"));
     }
 
     @Test
     public void testFailDecodeNotBase64() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new Base64EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new Base64EncodeContent());
 
-        testRunner.
-                setProperty(Base64EncodeContent.MODE, Base64EncodeContent.DECODE_MODE);
+        testRunner.setProperty(Base64EncodeContent.MODE, Base64EncodeContent.DECODE_MODE);
 
         testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
         testRunner.clearTransferState();
         testRunner.run();
 
-        testRunner.
-                assertAllFlowFilesTransferred(Base64EncodeContent.REL_FAILURE, 1);
+        testRunner.assertAllFlowFilesTransferred(Base64EncodeContent.REL_FAILURE, 1);
     }
 
     @Test
     public void testFailDecodeNotBase64ButIsAMultipleOfFourBytes() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new Base64EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new Base64EncodeContent());
 
-        testRunner.
-                setProperty(Base64EncodeContent.MODE, Base64EncodeContent.DECODE_MODE);
+        testRunner.setProperty(Base64EncodeContent.MODE, Base64EncodeContent.DECODE_MODE);
 
         testRunner.enqueue("four@@@@multiple".getBytes());
         testRunner.clearTransferState();
         testRunner.run();
 
-        testRunner.
-                assertAllFlowFilesTransferred(Base64EncodeContent.REL_FAILURE, 1);
+        testRunner.assertAllFlowFilesTransferred(Base64EncodeContent.REL_FAILURE, 1);
     }
 
 }


[32/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterConnectionStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterConnectionStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterConnectionStatusEntity.java
index 732a4b7..f211cc4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterConnectionStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterConnectionStatusEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ClusterConnectionStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ClusterConnectionStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ClusterConnectionStatusDTO.
  */
 @XmlRootElement(name = "clusterConnectionStatusEntity")
 public class ClusterConnectionStatusEntity extends Entity {

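[editor's note] The Javadoc collapsed in this hunk, and in the near-identical hunks that follow, describes the common entity pattern: an @XmlRootElement wrapper whose serialized form travels in an HTTP request or response body and carries a single DTO. A minimal sketch of what that serialization looks like with JAXB, using stand-in classes rather than the real NiFi entity and ClusterConnectionStatusDTO:

    import java.io.StringWriter;
    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.JAXBException;
    import javax.xml.bind.Marshaller;
    import javax.xml.bind.annotation.XmlRootElement;

    // Stand-in DTO for illustration only.
    class ExampleDTO {
        public String name = "node-1";
    }

    // Stand-in entity; real entities extend Entity and wrap a specific DTO.
    @XmlRootElement(name = "exampleEntity")
    class ExampleEntity {
        private ExampleDTO example = new ExampleDTO();

        public ExampleDTO getExample() {
            return example;
        }

        public void setExample(ExampleDTO example) {
            this.example = example;
        }
    }

    public class EntitySerializationSketch {
        public static void main(String[] args) throws JAXBException {
            final JAXBContext context = JAXBContext.newInstance(ExampleEntity.class);
            final Marshaller marshaller = context.createMarshaller();
            marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);

            final StringWriter writer = new StringWriter();
            marshaller.marshal(new ExampleEntity(), writer);
            // Prints an <exampleEntity> document wrapping the DTO, i.e. the form placed in an entity body.
            System.out.println(writer);
        }
    }
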
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterEntity.java
index 8306b0f..c59ce01 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ClusterDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ClusterDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ClusterDTO.
  */
 @XmlRootElement(name = "clusterEntity")
 public class ClusterEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterPortStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterPortStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterPortStatusEntity.java
index 16a2497..477b6ec 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterPortStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterPortStatusEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ClusterPortStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ClusterPortStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ClusterPortStatusDTO.
  */
 @XmlRootElement(name = "clusterPortStatusEntity")
 public class ClusterPortStatusEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessGroupStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessGroupStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessGroupStatusEntity.java
index cddb21a..f8b7e11 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessGroupStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessGroupStatusEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ClusterProcessGroupStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ClusterProcessGroupStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ClusterProcessGroupStatusDTO.
  */
 @XmlRootElement(name = "clusterProcessGroupStatusEntity")
 public class ClusterProcessGroupStatusEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessorStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessorStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessorStatusEntity.java
index 68e5c5c..2b8220f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessorStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterProcessorStatusEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ClusterProcessorStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ClusterProcessorStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ClusterProcessorStatusDTO.
  */
 @XmlRootElement(name = "clusterProcessorStatusEntity")
 public class ClusterProcessorStatusEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterRemoteProcessGroupStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterRemoteProcessGroupStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterRemoteProcessGroupStatusEntity.java
index 8a3da7c..66569c5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterRemoteProcessGroupStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterRemoteProcessGroupStatusEntity.java
@@ -20,9 +20,8 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ClusterRemoteProcessGroupStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ClusterRemoteProcessGroupStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a
+ * ClusterRemoteProcessGroupStatusDTO.
  */
 @XmlRootElement(name = "clusterRemoteProcessGroupStatusEntity")
 public class ClusterRemoteProcessGroupStatusEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterSearchResultsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterSearchResultsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterSearchResultsEntity.java
index ad1c1a5..48fc01f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterSearchResultsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterSearchResultsEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.search.NodeSearchResultDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to NodeSearchResultDTOs.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to NodeSearchResultDTOs.
  */
 @XmlRootElement(name = "clusterSearchResultsEntity")
 public class ClusterSearchResultsEntity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusEntity.java
index d22d432..84fffb9 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ClusterStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ClusterStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ClusterStatusDTO.
  */
 @XmlRootElement(name = "clusterStatusEntity")
 public class ClusterStatusEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusHistoryEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusHistoryEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusHistoryEntity.java
index 9ed69fd..b13984a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusHistoryEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ClusterStatusHistoryEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ClusterStatusHistoryDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ClusterStatusHistoryDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ClusterStatusHistoryDTO.
  */
 @XmlRootElement(name = "clusterStatusHistoryEntity")
 public class ClusterStatusHistoryEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ComponentHistoryEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ComponentHistoryEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ComponentHistoryEntity.java
index ca68211..a6e705f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ComponentHistoryEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ComponentHistoryEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ComponentHistoryDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ComponentHistoryDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ComponentHistoryDTO.
  */
 @XmlRootElement(name = "componentHistoryEntity")
 public class ComponentHistoryEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionEntity.java
index 2791bfa..b041e51 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ConnectionDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a
- * ConnectionDTO.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a ConnectionDTO.
  */
 @XmlRootElement(name = "connectionEntity")
 public class ConnectionEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionsEntity.java
index 7988d12..5b2b1b0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ConnectionsEntity.java
@@ -22,9 +22,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ConnectionDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * ConnectionDTOs.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of ConnectionDTOs.
  */
 @XmlRootElement(name = "connectionsEntity")
 public class ConnectionsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerConfigurationEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerConfigurationEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerConfigurationEntity.java
index e13903b..22c859e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerConfigurationEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerConfigurationEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ControllerConfigurationDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ControllerConfigurationDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ControllerConfigurationDTO.
  */
 @XmlRootElement(name = "controllerConfigurationEntity")
 public class ControllerConfigurationEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerEntity.java
index 62d0409..706f538 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ControllerDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ControllerDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ControllerDTO.
  */
 @XmlRootElement(name = "controllerEntity")
 public class ControllerEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceEntity.java
index 65c38a9..8f217b5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ControllerServiceDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a
- * controller service.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a controller service.
  */
 @XmlRootElement(name = "controllerServiceEntity")
 public class ControllerServiceEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceReferencingComponentsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceReferencingComponentsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceReferencingComponentsEntity.java
index 6999927..c6be79d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceReferencingComponentsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceReferencingComponentsEntity.java
@@ -21,9 +21,8 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ControllerServiceReferencingComponentDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * controller services referencing components.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of controller services referencing
+ * components.
  */
 @XmlRootElement(name = "controllerServiceReferencingComponentsEntity")
 public class ControllerServiceReferencingComponentsEntity extends Entity {
@@ -31,8 +30,7 @@ public class ControllerServiceReferencingComponentsEntity extends Entity {
     private Set<ControllerServiceReferencingComponentDTO> controllerServiceReferencingComponents;
 
     /**
-     * @return list of controller service referencing components that are being
-     * serialized
+     * @return list of controller service referencing components that are being serialized
      */
     public Set<ControllerServiceReferencingComponentDTO> getControllerServiceReferencingComponents() {
         return controllerServiceReferencingComponents;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceTypesEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceTypesEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceTypesEntity.java
index 580a76e..8b8bae4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceTypesEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServiceTypesEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.DocumentedTypeDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * controller service types.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of controller service types.
  */
 @XmlRootElement(name = "controllerServiceTypesEntity")
 public class ControllerServiceTypesEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServicesEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServicesEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServicesEntity.java
index cda93c4..9c2c366 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServicesEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerServicesEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ControllerServiceDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * controller services.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of controller services.
  */
 @XmlRootElement(name = "controllerServicesEntity")
 public class ControllerServicesEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerStatusEntity.java
index 94d8aca..6183546 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ControllerStatusEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ControllerStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ControllerStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ControllerStatusDTO.
  */
 @XmlRootElement(name = "controllerStatusEntity")
 public class ControllerStatusEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CounterEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CounterEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CounterEntity.java
index ef329b8..c84d11e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CounterEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CounterEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.CounterDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response from the API. This particular entity holds a reference to a
- * CounterDTO.
+ * A serialized representation of this class can be placed in the entity body of a response from the API. This particular entity holds a reference to a CounterDTO.
  */
 @XmlRootElement(name = "counterEntity")
 public class CounterEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CountersEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CountersEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CountersEntity.java
index c1991a1..dabd5a1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CountersEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/CountersEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.CountersDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response from the API. This particular entity holds a reference to a
- * CountersDTO.
+ * A serialized representation of this class can be placed in the entity body of a response from the API. This particular entity holds a reference to a CountersDTO.
  */
 @XmlRootElement(name = "countersEntity")
 public class CountersEntity extends Entity {
@@ -30,8 +28,7 @@ public class CountersEntity extends Entity {
     private CountersDTO counters;
 
     /**
-     * @return the counters which contains all the counter groups and a generation
-     * date
+     * @return the counters which contains all the counter groups and a generation date
      */
     public CountersDTO getCounters() {
         return counters;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FlowSnippetEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FlowSnippetEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FlowSnippetEntity.java
index a15cc51..bff4a51 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FlowSnippetEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FlowSnippetEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.FlowSnippetDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a FlowSnippetDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a FlowSnippetDTO.
  */
 @XmlRootElement(name = "flowSnippetEntity")
 public class FlowSnippetEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelEntity.java
index 8b43fb8..241ac51 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.FunnelDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a FunnelDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a FunnelDTO.
  */
 @XmlRootElement(name = "funnelEntity")
 public class FunnelEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelsEntity.java
index 4586c9d..34426db 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/FunnelsEntity.java
@@ -22,9 +22,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.FunnelDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * FunnelDTOs.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of FunnelDTOs.
  */
 @XmlRootElement(name = "funnelsEntity")
 public class FunnelsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/HistoryEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/HistoryEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/HistoryEntity.java
index f817cce..72a4656 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/HistoryEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/HistoryEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.action.HistoryDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a HistoryDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a HistoryDTO.
  */
 @XmlRootElement(name = "historyEntity")
 public class HistoryEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortEntity.java
index dc497b5..f92c478 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.PortDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to an input
- * PortDTO.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to an input PortDTO.
  */
 @XmlRootElement(name = "inputPortEntity")
 public class InputPortEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortsEntity.java
index a112fdd..93a03e7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/InputPortsEntity.java
@@ -22,9 +22,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.PortDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * input PortDTOs.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of input PortDTOs.
  */
 @XmlRootElement(name = "inputPortsEntity")
 public class InputPortsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelEntity.java
index 867d781..535240c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.LabelDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a LabelDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a LabelDTO.
  */
 @XmlRootElement(name = "labelEntity")
 public class LabelEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelsEntity.java
index d4b9681..05dab6c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LabelsEntity.java
@@ -22,9 +22,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.LabelDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * LabelDTOs.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of LabelDTOs.
  */
 @XmlRootElement(name = "labelsEntity")
 public class LabelsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LineageEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LineageEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LineageEntity.java
index 5d443af..6bcf039 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LineageEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/LineageEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.provenance.lineage.LineageDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a LineageDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a LineageDTO.
  */
 @XmlRootElement(name = "lineageEntity")
 public class LineageEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeEntity.java
index ac5b306..75c9c4c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.NodeDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a NodeDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a NodeDTO.
  */
 @XmlRootElement(name = "nodeEntity")
 public class NodeEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeStatusEntity.java
index d7471da..54ba306 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeStatusEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.NodeStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a NodeStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a NodeStatusDTO.
  */
 @XmlRootElement(name = "nodeStatusEntity")
 public class NodeStatusEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeSystemDiagnosticsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeSystemDiagnosticsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeSystemDiagnosticsEntity.java
index 59eafa1..443276c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeSystemDiagnosticsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/NodeSystemDiagnosticsEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.NodeSystemDiagnosticsDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a NodeSystemDiagnosticsDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a NodeSystemDiagnosticsDTO.
  */
 @XmlRootElement(name = "nodeSystemDiagnosticsEntity")
 public class NodeSystemDiagnosticsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortEntity.java
index 7c5145e..4f40aec 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.PortDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to an output
- * PortDTO.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to an output PortDTO.
  */
 @XmlRootElement(name = "outputPortEntity")
 public class OutputPortEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortsEntity.java
index 0127190..e624c52 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/OutputPortsEntity.java
@@ -22,9 +22,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.PortDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * output PortDTOs.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of output PortDTOs.
  */
 @XmlRootElement(name = "outputPortsEntity")
 public class OutputPortsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PrioritizerTypesEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PrioritizerTypesEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PrioritizerTypesEntity.java
index 2e3f545..7f1dfa3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PrioritizerTypesEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PrioritizerTypesEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.DocumentedTypeDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * prioritizer types.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of prioritizer types.
  */
 @XmlRootElement(name = "prioritizerTypesEntity")
 public class PrioritizerTypesEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupEntity.java
index c677ef1..d1ec034 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ProcessGroupDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ProcessGroupDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ProcessGroupDTO.
  */
 @XmlRootElement(name = "processGroupEntity")
 public class ProcessGroupEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupStatusEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupStatusEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupStatusEntity.java
index f0a6e0f..d7d857f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupStatusEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupStatusEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.ProcessGroupStatusDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ProcessGroupStatusDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ProcessGroupStatusDTO.
  */
 @XmlRootElement(name = "processGroupStatusEntity")
 public class ProcessGroupStatusEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupsEntity.java
index 1fdf23f..7acf650 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessGroupsEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ProcessGroupDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a set of ProcessGroupDTOs.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a set of ProcessGroupDTOs.
  */
 @XmlRootElement(name = "processGroupsEntity")
 public class ProcessGroupsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorEntity.java
index 21ad431..e03bb85 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ProcessorDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ProcessorDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ProcessorDTO.
  */
 @XmlRootElement(name = "processorEntity")
 public class ProcessorEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorTypesEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorTypesEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorTypesEntity.java
index fa584ce..17cf7fd 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorTypesEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorTypesEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.DocumentedTypeDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * processor types.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of processor types.
  */
 @XmlRootElement(name = "processorTypesEntity")
 public class ProcessorTypesEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorsEntity.java
index 201f5aa..04db99f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProcessorsEntity.java
@@ -22,9 +22,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ProcessorDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * ProcessorDTOs.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of ProcessorDTOs.
  */
 @XmlRootElement(name = "processorsEntity")
 public class ProcessorsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PropertyDescriptorEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PropertyDescriptorEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PropertyDescriptorEntity.java
index 87ca4a3..478952c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PropertyDescriptorEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/PropertyDescriptorEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.PropertyDescriptorDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a PropertyDescriptorDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a PropertyDescriptorDTO.
  */
 @XmlRootElement(name = "propertyDescriptor")
 public class PropertyDescriptorEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceEventEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceEventEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceEventEntity.java
index 0aa0a55..0396145 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceEventEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceEventEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ProvenanceEventDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ProvenanceEventDTO.
  */
 @XmlRootElement(name = "provenanceEventEntity")
 public class ProvenanceEventEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceOptionsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceOptionsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceOptionsEntity.java
index 01591cb..e4510d6 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceOptionsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ProvenanceOptionsEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.provenance.ProvenanceOptionsDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a ProvenanceOptionsDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a ProvenanceOptionsDTO.
  */
 @XmlRootElement(name = "provenanceOptionsEntity")
 public class ProvenanceOptionsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupEntity.java
index 62a84d6..ba12f40 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.RemoteProcessGroupDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a RemoteProcessGroupDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a RemoteProcessGroupDTO.
  */
 @XmlRootElement(name = "remoteProcessGroupEntity")
 public class RemoteProcessGroupEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupPortEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupPortEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupPortEntity.java
index f310b5e..9d5189a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupPortEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupPortEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.RemoteProcessGroupPortDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a RemoteProcessGroupPortDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a RemoteProcessGroupPortDTO.
  */
 @XmlRootElement(name = "remoteProcessGroupPortEntity")
 public class RemoteProcessGroupPortEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupsEntity.java
index a94b42f..42abf23 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/RemoteProcessGroupsEntity.java
@@ -22,9 +22,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.RemoteProcessGroupDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * RemoteProcessGroupDTOs.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of RemoteProcessGroupDTOs.
  */
 @XmlRootElement(name = "remoteProcessGroupsEntity")
 public class RemoteProcessGroupsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskEntity.java
index 2177564..3c07dd7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ReportingTaskDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a
- * reporting task.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a reporting task.
  */
 @XmlRootElement(name = "reportingTaskEntity")
 public class ReportingTaskEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskTypesEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskTypesEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskTypesEntity.java
index c527ec7..da99531 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskTypesEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTaskTypesEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.DocumentedTypeDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * reporting task types.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of reporting task types.
  */
 @XmlRootElement(name = "reportingTaskTypesEntity")
 public class ReportingTaskTypesEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTasksEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTasksEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTasksEntity.java
index 4f7be11..b493594 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTasksEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ReportingTasksEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.ReportingTaskDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a reference to a list of
- * reporting tasks.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a reference to a list of reporting tasks.
  */
 @XmlRootElement(name = "reportingTasksEntity")
 public class ReportingTasksEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SearchResultsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SearchResultsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SearchResultsEntity.java
index 01e6918..43aa3cf 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SearchResultsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SearchResultsEntity.java
@@ -20,10 +20,8 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.search.SearchResultsDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to ProcessorSearchResultDTOs, RemoteProcessGroupSearchResultDTOs,
- * and ConnectionSearchResultDTOs.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to ProcessorSearchResultDTOs,
+ * RemoteProcessGroupSearchResultDTOs, and ConnectionSearchResultDTOs.
  */
 @XmlRootElement(name = "searchResultsEntity")
 public class SearchResultsEntity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SnippetEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SnippetEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SnippetEntity.java
index a9b05a1..d20e61d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SnippetEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SnippetEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.SnippetDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a SnippetDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a SnippetDTO.
  */
 @XmlRootElement(name = "snippetEntity")
 public class SnippetEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/StatusHistoryEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/StatusHistoryEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/StatusHistoryEntity.java
index d3c2247..98891ff 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/StatusHistoryEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/StatusHistoryEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.status.StatusHistoryDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a StatusHistoryDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a StatusHistoryDTO.
  */
 @XmlRootElement(name = "statusHistoryEntity")
 public class StatusHistoryEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SystemDiagnosticsEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SystemDiagnosticsEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SystemDiagnosticsEntity.java
index e7baa16..8c8b865 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SystemDiagnosticsEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/SystemDiagnosticsEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.SystemDiagnosticsDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a SystemDiagnosticsDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a SystemDiagnosticsDTO.
  */
 @XmlRootElement(name = "systemDiagnosticsEntity")
 public class SystemDiagnosticsEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplateEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplateEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplateEntity.java
index 8e9f07a..553d686 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplateEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplateEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.TemplateDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a TemplateDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a TemplateDTO.
  */
 @XmlRootElement(name = "templateEntity")
 public class TemplateEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplatesEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplatesEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplatesEntity.java
index 3ff5bc6..3a9f46d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplatesEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/TemplatesEntity.java
@@ -24,9 +24,7 @@ import org.apache.nifi.web.api.dto.TemplateDTO;
 import org.apache.nifi.web.api.dto.util.TimeAdapter;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a set of TemplateDTOs.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a set of TemplateDTOs.
  */
 @XmlRootElement(name = "templatesEntity")
 public class TemplatesEntity extends Entity {
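
The comment reflows above all touch the same family of thin JAXB wrapper classes. For context, here is a hedged sketch of the shape these wrappers share; it is not copied from the diffs (which only change the class comments), and the field and accessor names are illustrative assumptions.

    package org.apache.nifi.web.api.entity;

    import javax.xml.bind.annotation.XmlRootElement;

    import org.apache.nifi.web.api.dto.provenance.lineage.LineageDTO;

    /**
     * Hedged sketch only: an Entity subclass annotated for JAXB that exposes a
     * single DTO, so a serialized form of it can travel in a request or
     * response body. Accessor names are assumed, not taken from the repository.
     */
    @XmlRootElement(name = "lineageEntity")
    public class LineageEntity extends Entity {

        private LineageDTO lineage;

        public LineageDTO getLineage() {
            return lineage;
        }

        public void setLineage(final LineageDTO lineage) {
            this.lineage = lineage;
        }
    }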


[13/50] [abbrv] incubator-nifi git commit: NIFI-549: Fixed NPE

Posted by mc...@apache.org.
NIFI-549: Fixed NPE


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/ba96e43a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/ba96e43a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/ba96e43a

Branch: refs/heads/NIFI-292
Commit: ba96e43a8e0fa682e9c803228d292969ffd0c686
Parents: 0759660
Author: Mark Payne <ma...@hotmail.com>
Authored: Mon Apr 27 12:01:36 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Mon Apr 27 12:01:36 2015 -0400

----------------------------------------------------------------------
 .../java/org/apache/nifi/processors/GeoEnrichIP.java   | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/ba96e43a/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java b/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
index be03243..1ecb221 100644
--- a/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
+++ b/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
@@ -189,8 +189,17 @@ public class GeoEnrichIP extends AbstractProcessor {
         final Map<String, String> attrs = new HashMap<>();
         attrs.put(new StringBuilder(ipAttributeName).append(".geo.lookup.micros").toString(), String.valueOf(stopWatch.getDuration(TimeUnit.MICROSECONDS)));
         attrs.put(new StringBuilder(ipAttributeName).append(".geo.city").toString(), response.getCity().getName());
-        attrs.put(new StringBuilder(ipAttributeName).append(".geo.latitude").toString(), response.getLocation().getLatitude().toString());
-        attrs.put(new StringBuilder(ipAttributeName).append(".geo.longitude").toString(), response.getLocation().getLongitude().toString());
+        
+        final Double latitude = response.getLocation().getLatitude();
+        if ( latitude != null ) {
+        	attrs.put(new StringBuilder(ipAttributeName).append(".geo.latitude").toString(), latitude.toString());
+        }
+        
+        final Double longitude = response.getLocation().getLongitude();
+        if ( longitude != null ) {
+        	attrs.put(new StringBuilder(ipAttributeName).append(".geo.longitude").toString(), longitude.toString());
+        }
+        
         int i = 0;
         for (final Subdivision subd : response.getSubdivisions()) {
             attrs.put(new StringBuilder(ipAttributeName).append(".geo.subdivision.").append(i).toString(), subd.getName());


[11/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
index b37471e..65bbb36 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
@@ -96,14 +96,14 @@ public class PutJMS extends AbstractProcessor {
     public static final Charset UTF8 = Charset.forName("UTF-8");
     public static final int DEFAULT_MESSAGE_PRIORITY = 4;
 
-    public static final Relationship REL_SUCCESS = new Relationship.Builder().
-            name("success").
-            description("All FlowFiles that are sent to the JMS destination are routed to this relationship").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("All FlowFiles that cannot be routed to the JMS destination are routed to this relationship").
-            build();
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("All FlowFiles that are sent to the JMS destination are routed to this relationship")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("All FlowFiles that cannot be routed to the JMS destination are routed to this relationship")
+            .build();
 
     private final Queue<WrappedMessageProducer> producerQueue = new LinkedBlockingQueue<>();
     private final List<PropertyDescriptor> properties;
@@ -156,10 +156,7 @@ public class PutJMS extends AbstractProcessor {
     @Override
     public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
         final ProcessorLog logger = getLogger();
-        final List<FlowFile> flowFiles = session.get(context.
-                getProperty(BATCH_SIZE).
-                asInteger().
-                intValue());
+        final List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).asInteger().intValue());
         if (flowFiles.isEmpty()) {
             return;
         }
@@ -167,14 +164,10 @@ public class PutJMS extends AbstractProcessor {
         WrappedMessageProducer wrappedProducer = producerQueue.poll();
         if (wrappedProducer == null) {
             try {
-                wrappedProducer = JmsFactory.
-                        createMessageProducer(context, true);
-                logger.info("Connected to JMS server {}", new Object[]{context.
-                    getProperty(URL).
-                    getValue()});
+                wrappedProducer = JmsFactory.createMessageProducer(context, true);
+                logger.info("Connected to JMS server {}", new Object[]{context.getProperty(URL).getValue()});
             } catch (final JMSException e) {
-                logger.
-                        error("Failed to connect to JMS Server due to {}", new Object[]{e});
+                logger.error("Failed to connect to JMS Server due to {}", new Object[]{e});
                 session.transfer(flowFiles, REL_FAILURE);
                 context.yield();
                 return;
@@ -184,9 +177,7 @@ public class PutJMS extends AbstractProcessor {
         final Session jmsSession = wrappedProducer.getSession();
         final MessageProducer producer = wrappedProducer.getProducer();
 
-        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).
-                asDataSize(DataUnit.B).
-                intValue();
+        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
 
         try {
             final Set<FlowFile> successfulFlowFiles = new HashSet<>();
@@ -194,8 +185,7 @@ public class PutJMS extends AbstractProcessor {
             for (FlowFile flowFile : flowFiles) {
                 if (flowFile.getSize() > maxBufferSize) {
                     session.transfer(flowFile, REL_FAILURE);
-                    logger.
-                            warn("Routing {} to failure because its size exceeds the configured max", new Object[]{flowFile});
+                    logger.warn("Routing {} to failure because its size exceeds the configured max", new Object[]{flowFile});
                     continue;
                 }
 
@@ -208,29 +198,18 @@ public class PutJMS extends AbstractProcessor {
                     }
                 });
 
-                final Long ttl = context.getProperty(MESSAGE_TTL).
-                        asTimePeriod(TimeUnit.MILLISECONDS);
+                final Long ttl = context.getProperty(MESSAGE_TTL).asTimePeriod(TimeUnit.MILLISECONDS);
 
-                final String replyToQueueName = context.
-                        getProperty(REPLY_TO_QUEUE).
-                        evaluateAttributeExpressions(flowFile).
-                        getValue();
-                final Destination replyToQueue = replyToQueueName == null ? null : JmsFactory.
-                        createQueue(context, replyToQueueName);
+                final String replyToQueueName = context.getProperty(REPLY_TO_QUEUE).evaluateAttributeExpressions(flowFile).getValue();
+                final Destination replyToQueue = replyToQueueName == null ? null : JmsFactory.createQueue(context, replyToQueueName);
 
                 int priority = DEFAULT_MESSAGE_PRIORITY;
                 try {
-                    final Integer priorityInt = context.
-                            getProperty(MESSAGE_PRIORITY).
-                            evaluateAttributeExpressions(flowFile).
-                            asInteger();
+                    final Integer priorityInt = context.getProperty(MESSAGE_PRIORITY).evaluateAttributeExpressions(flowFile).asInteger();
                     priority = priorityInt == null ? priority : priorityInt;
                 } catch (final NumberFormatException e) {
-                    logger.
-                            warn("Invalid value for JMS Message Priority: {}; defaulting to priority of {}", new Object[]{
-                                context.getProperty(MESSAGE_PRIORITY).
-                                evaluateAttributeExpressions(flowFile).
-                                getValue(), DEFAULT_MESSAGE_PRIORITY});
+                    logger.warn("Invalid value for JMS Message Priority: {}; defaulting to priority of {}",
+                            new Object[]{context.getProperty(MESSAGE_PRIORITY).evaluateAttributeExpressions(flowFile).getValue(), DEFAULT_MESSAGE_PRIORITY});
                 }
 
                 try {
@@ -242,16 +221,14 @@ public class PutJMS extends AbstractProcessor {
                     }
                     producer.send(message);
                 } catch (final JMSException e) {
-                    logger.
-                            error("Failed to send {} to JMS Server due to {}", new Object[]{flowFile, e});
+                    logger.error("Failed to send {} to JMS Server due to {}", new Object[]{flowFile, e});
                     session.transfer(flowFiles, REL_FAILURE);
                     context.yield();
 
                     try {
                         jmsSession.rollback();
                     } catch (final JMSException jmse) {
-                        logger.
-                                warn("Unable to roll back JMS Session due to {}", new Object[]{jmse});
+                        logger.warn("Unable to roll back JMS Session due to {}", new Object[]{jmse});
                     }
 
                     wrappedProducer.close(logger);
@@ -259,22 +236,17 @@ public class PutJMS extends AbstractProcessor {
                 }
 
                 successfulFlowFiles.add(flowFile);
-                session.getProvenanceReporter().
-                        send(flowFile, "jms://" + context.getProperty(URL).
-                                getValue());
+                session.getProvenanceReporter().send(flowFile, "jms://" + context.getProperty(URL).getValue());
             }
 
             try {
                 jmsSession.commit();
 
                 session.transfer(successfulFlowFiles, REL_SUCCESS);
-                final String flowFileDescription = successfulFlowFiles.size() > 10 ? successfulFlowFiles.
-                        size() + " FlowFiles" : successfulFlowFiles.toString();
-                logger.
-                        info("Sent {} to JMS Server and transferred to 'success'", new Object[]{flowFileDescription});
+                final String flowFileDescription = successfulFlowFiles.size() > 10 ? successfulFlowFiles.size() + " FlowFiles" : successfulFlowFiles.toString();
+                logger.info("Sent {} to JMS Server and transferred to 'success'", new Object[]{flowFileDescription});
             } catch (JMSException e) {
-                logger.
-                        error("Failed to commit JMS Session due to {}; rolling back session", new Object[]{e});
+                logger.error("Failed to commit JMS Session due to {}; rolling back session", new Object[]{e});
                 session.rollback();
                 wrappedProducer.close(logger);
             }
@@ -289,22 +261,19 @@ public class PutJMS extends AbstractProcessor {
             final FlowFile flowFile, final Destination replyToQueue, final Integer priority) throws JMSException {
         final Message message;
 
-        switch (context.getProperty(MESSAGE_TYPE).
-                getValue()) {
+        switch (context.getProperty(MESSAGE_TYPE).getValue()) {
             case MSG_TYPE_EMPTY: {
                 message = jmsSession.createTextMessage("");
                 break;
             }
             case MSG_TYPE_STREAM: {
-                final StreamMessage streamMessage = jmsSession.
-                        createStreamMessage();
+                final StreamMessage streamMessage = jmsSession.createStreamMessage();
                 streamMessage.writeBytes(messageContent);
                 message = streamMessage;
                 break;
             }
             case MSG_TYPE_TEXT: {
-                message = jmsSession.
-                        createTextMessage(new String(messageContent, UTF8));
+                message = jmsSession.createTextMessage(new String(messageContent, UTF8));
                 break;
             }
             case MSG_TYPE_MAP: {
@@ -313,8 +282,7 @@ public class PutJMS extends AbstractProcessor {
             }
             case MSG_TYPE_BYTE:
             default: {
-                final BytesMessage bytesMessage = jmsSession.
-                        createBytesMessage();
+                final BytesMessage bytesMessage = jmsSession.createBytesMessage();
                 bytesMessage.writeBytes(messageContent);
                 message = bytesMessage;
             }
@@ -330,8 +298,7 @@ public class PutJMS extends AbstractProcessor {
             message.setJMSPriority(priority);
         }
 
-        if (context.getProperty(ATTRIBUTES_TO_JMS_PROPS).
-                asBoolean()) {
+        if (context.getProperty(ATTRIBUTES_TO_JMS_PROPS).asBoolean()) {
             copyAttributesToJmsProps(flowFile, message);
         }
 
@@ -339,35 +306,25 @@ public class PutJMS extends AbstractProcessor {
     }
 
     /**
-     * Iterates through all of the flow file's metadata and for any metadata key
-     * that starts with <code>jms.</code>, the value for the corresponding key
-     * is written to the JMS message as a property. The name of this property is
-     * equal to the key of the flow file's metadata minus the <code>jms.</code>.
-     * For example, if the flowFile has a metadata entry:
+     * Iterates through all of the flow file's metadata and for any metadata key that starts with <code>jms.</code>, the value for the corresponding key is written to the JMS message as a property.
+     * The name of this property is equal to the key of the flow file's metadata minus the <code>jms.</code>. For example, if the flowFile has a metadata entry:
      * <br /><br />
      * <code>jms.count</code> = <code>8</code>
      * <br /><br />
-     * then the JMS message will have a String property added to it with the
-     * property name <code>count</code> and value <code>8</code>.
+     * then the JMS message will have a String property added to it with the property name <code>count</code> and value <code>8</code>.
      *
-     * If the flow file also has a metadata key with the name
-     * <code>jms.count.type</code>, then the value of that metadata entry will
-     * determine the JMS property type to use for the value. For example, if the
-     * flow file has the following properties:
+     * If the flow file also has a metadata key with the name <code>jms.count.type</code>, then the value of that metadata entry will determine the JMS property type to use for the value. For example,
+     * if the flow file has the following properties:
      * <br /><br />
      * <code>jms.count</code> = <code>8</code><br />
      * <code>jms.count.type</code> = <code>integer</code>
      * <br /><br />
-     * Then <code>message</code> will have an INTEGER property added with the
-     * value 8.
+     * Then <code>message</code> will have an INTEGER property added with the value 8.
      * <br /><br/>
-     * If the type is not valid for the given value (e.g.,
-     * <code>jms.count.type</code> = <code>integer</code> and
-     * <code>jms.count</code> = <code>hello</code>, then this JMS property will
-     * not be added to <code>message</code>.
+     * If the type is not valid for the given value (e.g., <code>jms.count.type</code> = <code>integer</code> and <code>jms.count</code> = <code>hello</code>), then this JMS property will not be added
+     * to <code>message</code>.
      *
-     * @param flowFile The flow file whose metadata should be examined for JMS
-     * properties.
+     * @param flowFile The flow file whose metadata should be examined for JMS properties.
      * @param message The JMS message to which we want to add properties.
      * @throws JMSException ex
      */
@@ -380,49 +337,37 @@ public class PutJMS extends AbstractProcessor {
             final String value = entry.getValue();
 
             if (key.toLowerCase().
-                    startsWith(ATTRIBUTE_PREFIX.toLowerCase())
-                    && !key.toLowerCase().
-                    endsWith(ATTRIBUTE_TYPE_SUFFIX.toLowerCase())) {
+                    startsWith(ATTRIBUTE_PREFIX.toLowerCase()) && !key.toLowerCase().endsWith(ATTRIBUTE_TYPE_SUFFIX.toLowerCase())) {
 
-                final String jmsPropName = key.substring(ATTRIBUTE_PREFIX.
-                        length());
+                final String jmsPropName = key.substring(ATTRIBUTE_PREFIX.length());
                 final String type = attributes.get(key + ATTRIBUTE_TYPE_SUFFIX);
 
                 try {
                     if (type == null || type.equalsIgnoreCase(PROP_TYPE_STRING)) {
                         message.setStringProperty(jmsPropName, value);
                     } else if (type.equalsIgnoreCase(PROP_TYPE_INTEGER)) {
-                        message.setIntProperty(jmsPropName, Integer.
-                                parseInt(value));
+                        message.setIntProperty(jmsPropName, Integer.parseInt(value));
                     } else if (type.equalsIgnoreCase(PROP_TYPE_BOOLEAN)) {
-                        message.setBooleanProperty(jmsPropName, Boolean.
-                                parseBoolean(value));
+                        message.setBooleanProperty(jmsPropName, Boolean.parseBoolean(value));
                     } else if (type.equalsIgnoreCase(PROP_TYPE_SHORT)) {
-                        message.setShortProperty(jmsPropName, Short.
-                                parseShort(value));
+                        message.setShortProperty(jmsPropName, Short.parseShort(value));
                     } else if (type.equalsIgnoreCase(PROP_TYPE_LONG)) {
-                        message.setLongProperty(jmsPropName, Long.
-                                parseLong(value));
+                        message.setLongProperty(jmsPropName, Long.parseLong(value));
                     } else if (type.equalsIgnoreCase(PROP_TYPE_BYTE)) {
-                        message.setByteProperty(jmsPropName, Byte.
-                                parseByte(value));
+                        message.setByteProperty(jmsPropName, Byte.parseByte(value));
                     } else if (type.equalsIgnoreCase(PROP_TYPE_DOUBLE)) {
-                        message.setDoubleProperty(jmsPropName, Double.
-                                parseDouble(value));
+                        message.setDoubleProperty(jmsPropName, Double.parseDouble(value));
                     } else if (type.equalsIgnoreCase(PROP_TYPE_FLOAT)) {
-                        message.setFloatProperty(jmsPropName, Float.
-                                parseFloat(value));
+                        message.setFloatProperty(jmsPropName, Float.parseFloat(value));
                     } else if (type.equalsIgnoreCase(PROP_TYPE_OBJECT)) {
                         message.setObjectProperty(jmsPropName, value);
                     } else {
-                        logger.
-                                warn("Attribute key '{}' for {} has value '{}', but expected one of: integer, string, object, byte, double, float, long, short, boolean; not adding this property",
-                                        new Object[]{key, flowFile, value});
+                        logger.warn("Attribute key '{}' for {} has value '{}', but expected one of: integer, string, object, byte, double, float, long, short, boolean; not adding this property",
+                                new Object[]{key, flowFile, value});
                     }
                 } catch (NumberFormatException e) {
-                    logger.
-                            warn("Attribute key '{}' for {} has value '{}', but attribute key '{}' has value '{}'. Not adding this JMS property",
-                                    new Object[]{key, flowFile, value, key + ATTRIBUTE_TYPE_SUFFIX, PROP_TYPE_INTEGER});
+                    logger.warn("Attribute key '{}' for {} has value '{}', but attribute key '{}' has value '{}'. Not adding this JMS property",
+                            new Object[]{key, flowFile, value, key + ATTRIBUTE_TYPE_SUFFIX, PROP_TYPE_INTEGER});
                 }
             }
         }

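The Javadoc reflowed above describes how any FlowFile attribute whose key starts with jms. is written to the outgoing JMS message as a property, with an optional companion <key>.type attribute selecting the property type. A minimal sketch of that mapping in isolation, using only javax.jms and a plain Map (the literal "jms." and ".type" strings stand in for the processor's ATTRIBUTE_PREFIX and ATTRIBUTE_TYPE_SUFFIX constants, only three of the supported types are shown, and the processor's case-insensitive prefix comparison is simplified to an exact match):

    import java.util.Map;

    import javax.jms.JMSException;
    import javax.jms.Message;

    public class JmsAttributeMappingSketch {

        // Copies "jms."-prefixed attributes onto the message, honoring an optional ".type" companion attribute.
        public static void copyJmsAttributes(final Map<String, String> attributes, final Message message) throws JMSException {
            for (final Map.Entry<String, String> entry : attributes.entrySet()) {
                final String key = entry.getKey();
                final String value = entry.getValue();
                if (!key.startsWith("jms.") || key.endsWith(".type")) {
                    continue;
                }

                final String propName = key.substring("jms.".length()); // e.g. "jms.count" -> "count"
                final String type = attributes.get(key + ".type");
                try {
                    if (type == null || type.equalsIgnoreCase("string")) {
                        message.setStringProperty(propName, value);
                    } else if (type.equalsIgnoreCase("integer")) {
                        message.setIntProperty(propName, Integer.parseInt(value));
                    } else if (type.equalsIgnoreCase("boolean")) {
                        message.setBooleanProperty(propName, Boolean.parseBoolean(value));
                    }
                    // short, long, byte, float, double and object follow the same pattern
                } catch (final NumberFormatException nfe) {
                    // a value that does not parse for the declared type is skipped, as the Javadoc above describes
                }
            }
        }
    }
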
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
index d061c33..acabe08 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
@@ -67,54 +67,53 @@ public class ReplaceText extends AbstractProcessor {
     private final Pattern backReferencePattern = Pattern.compile("\\$(\\d+)");
     private static final byte[] ZERO_BYTE_BUFFER = new byte[0];
     // Properties
-    public static final PropertyDescriptor REGEX = new PropertyDescriptor.Builder().
-            name("Regular Expression").
-            description("The Regular Expression to search for in the FlowFile content").
-            required(true).
-            addValidator(StandardValidators.
-                    createRegexValidator(0, Integer.MAX_VALUE, true)).
-            expressionLanguageSupported(true).
-            defaultValue("(.*)").
-            build();
-    public static final PropertyDescriptor REPLACEMENT_VALUE = new PropertyDescriptor.Builder().
-            name("Replacement Value").
-            description("The value to replace the regular expression with. Back-references to Regular Expression capturing groups are supported, but back-references that reference capturing groups that do not exist in the regular expression will be treated as literal value.").
-            required(true).
-            defaultValue("$1").
-            addValidator(Validator.VALID).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder().
-            name("Character Set").
-            description("The Character Set in which the file is encoded").
-            required(true).
-            addValidator(StandardValidators.CHARACTER_SET_VALIDATOR).
-            defaultValue("UTF-8").
-            build();
-    public static final PropertyDescriptor MAX_BUFFER_SIZE = new PropertyDescriptor.Builder().
-            name("Maximum Buffer Size").
-            description("Specifies the maximum amount of data to buffer (per file or per line, depending on the Evaluation Mode) in order to apply the regular expressions. If 'Entire Text' (in Evaluation Mode) is selected and the FlowFile is larger than this value, the FlowFile will be routed to 'failure'. "
-                    + "In 'Line-by-Line' Mode, if a single line is larger than this value, the FlowFile will be routed to 'failure'. A default value of 1 MB is provided, primarily for 'Entire Text' mode. In 'Line-by-Line' Mode, a value such as 8 KB or 16 KB is suggested. This value is ignored and the buffer is not used if 'Regular Expression' is set to '.*'").
-            required(true).
-            addValidator(StandardValidators.DATA_SIZE_VALIDATOR).
-            defaultValue("1 MB").
-            build();
-    public static final PropertyDescriptor EVALUATION_MODE = new PropertyDescriptor.Builder().
-            name("Evaluation Mode").
-            description("Evaluate the 'Regular Expression' against each line (Line-by-Line) or buffer the entire file into memory (Entire Text) and then evaluate the 'Regular Expression'.").
-            allowableValues(LINE_BY_LINE, ENTIRE_TEXT).
-            defaultValue(ENTIRE_TEXT).
-            required(true).
-            build();
+    public static final PropertyDescriptor REGEX = new PropertyDescriptor.Builder()
+            .name("Regular Expression")
+            .description("The Regular Expression to search for in the FlowFile content")
+            .required(true)
+            .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
+            .expressionLanguageSupported(true)
+            .defaultValue("(.*)")
+            .build();
+    public static final PropertyDescriptor REPLACEMENT_VALUE = new PropertyDescriptor.Builder()
+            .name("Replacement Value")
+            .description("The value to replace the regular expression with. Back-references to Regular Expression capturing groups are supported, but back-references that reference capturing groups that do not exist in the regular expression will be treated as a literal value.")
+            .required(true)
+            .defaultValue("$1")
+            .addValidator(Validator.VALID)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
+            .name("Character Set")
+            .description("The Character Set in which the file is encoded")
+            .required(true)
+            .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
+            .defaultValue("UTF-8")
+            .build();
+    public static final PropertyDescriptor MAX_BUFFER_SIZE = new PropertyDescriptor.Builder()
+            .name("Maximum Buffer Size")
+            .description("Specifies the maximum amount of data to buffer (per file or per line, depending on the Evaluation Mode) in order to apply the regular expressions. If 'Entire Text' (in Evaluation Mode) is selected and the FlowFile is larger than this value, the FlowFile will be routed to 'failure'. "
+                    + "In 'Line-by-Line' Mode, if a single line is larger than this value, the FlowFile will be routed to 'failure'. A default value of 1 MB is provided, primarily for 'Entire Text' mode. In 'Line-by-Line' Mode, a value such as 8 KB or 16 KB is suggested. This value is ignored and the buffer is not used if 'Regular Expression' is set to '.*'")
+            .required(true)
+            .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
+            .defaultValue("1 MB")
+            .build();
+    public static final PropertyDescriptor EVALUATION_MODE = new PropertyDescriptor.Builder()
+            .name("Evaluation Mode")
+            .description("Evaluate the 'Regular Expression' against each line (Line-by-Line) or buffer the entire file into memory (Entire Text) and then evaluate the 'Regular Expression'.")
+            .allowableValues(LINE_BY_LINE, ENTIRE_TEXT)
+            .defaultValue(ENTIRE_TEXT)
+            .required(true)
+            .build();
     // Relationships
-    public static final Relationship REL_SUCCESS = new Relationship.Builder().
-            name("success").
-            description("FlowFiles that have been successfully updated are routed to this relationship, as well as FlowFiles whose content does not match the given Regular Expression").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("FlowFiles that could not be updated are routed to this relationship").
-            build();
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("FlowFiles that have been successfully updated are routed to this relationship, as well as FlowFiles whose content does not match the given Regular Expression")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("FlowFiles that could not be updated are routed to this relationship")
+            .build();
     //
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
@@ -147,19 +146,15 @@ public class ReplaceText extends AbstractProcessor {
 
     @Override
     public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
-        final List<FlowFile> flowFiles = session.get(FlowFileFilters.
-                newSizeBasedFilter(1, DataUnit.MB, 100));
+        final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(1, DataUnit.MB, 100));
         if (flowFiles.isEmpty()) {
             return;
         }
 
         final ProcessorLog logger = getLogger();
-        final String unsubstitutedRegex = context.getProperty(REGEX).
-                getValue();
-        String unsubstitutedReplacement = context.getProperty(REPLACEMENT_VALUE).
-                getValue();
-        if (unsubstitutedRegex.equals("(.*)") && unsubstitutedReplacement.
-                equals("$1")) {
+        final String unsubstitutedRegex = context.getProperty(REGEX).getValue();
+        String unsubstitutedReplacement = context.getProperty(REPLACEMENT_VALUE).getValue();
+        if (unsubstitutedRegex.equals("(.*)") && unsubstitutedReplacement.equals("$1")) {
             // This pattern says replace content with itself. We can highly optimize this process by simply transferring
             // all FlowFiles to the 'success' relationship
             session.transfer(flowFiles, REL_SUCCESS);
@@ -180,26 +175,17 @@ public class ReplaceText extends AbstractProcessor {
             }
         };
 
-        final String regexValue = context.getProperty(REGEX).
-                evaluateAttributeExpressions().
-                getValue();
-        final int numCapturingGroups = Pattern.compile(regexValue).
-                matcher("").
-                groupCount();
+        final String regexValue = context.getProperty(REGEX).evaluateAttributeExpressions().getValue();
+        final int numCapturingGroups = Pattern.compile(regexValue).matcher("").groupCount();
 
         final boolean skipBuffer = ".*".equals(unsubstitutedRegex);
 
-        final Charset charset = Charset.forName(context.
-                getProperty(CHARACTER_SET).
-                getValue());
-        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).
-                asDataSize(DataUnit.B).
-                intValue();
+        final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).getValue());
+        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
 
         final byte[] buffer = skipBuffer ? ZERO_BYTE_BUFFER : new byte[maxBufferSize];
 
-        final String evaluateMode = context.getProperty(EVALUATION_MODE).
-                getValue();
+        final String evaluateMode = context.getProperty(EVALUATION_MODE).getValue();
 
         for (FlowFile flowFile : flowFiles) {
             if (evaluateMode.equalsIgnoreCase(ENTIRE_TEXT)) {
@@ -209,11 +195,8 @@ public class ReplaceText extends AbstractProcessor {
                 }
             }
 
-            String replacement = context.getProperty(REPLACEMENT_VALUE).
-                    evaluateAttributeExpressions(flowFile, escapeBackRefDecorator).
-                    getValue();
-            final Matcher backRefMatcher = backReferencePattern.
-                    matcher(replacement);
+            String replacement = context.getProperty(REPLACEMENT_VALUE).evaluateAttributeExpressions(flowFile, escapeBackRefDecorator).getValue();
+            final Matcher backRefMatcher = backReferencePattern.matcher(replacement);
             while (backRefMatcher.find()) {
                 final String backRefNum = backRefMatcher.group(1);
                 if (backRefNum.startsWith("0")) {
@@ -231,8 +214,7 @@ public class ReplaceText extends AbstractProcessor {
                 }
 
                 if (backRefIndex > numCapturingGroups) {
-                    final StringBuilder sb = new StringBuilder(replacement.
-                            length() + 1);
+                    final StringBuilder sb = new StringBuilder(replacement.length() + 1);
                     final int groupStart = backRefMatcher.start(1);
 
                     sb.append(replacement.substring(0, groupStart - 1));
@@ -250,14 +232,12 @@ public class ReplaceText extends AbstractProcessor {
             if (skipBuffer) {
                 final StopWatch stopWatch = new StopWatch(true);
                 if (evaluateMode.equalsIgnoreCase(ENTIRE_TEXT)) {
-                    flowFile = session.
-                            write(flowFile, new OutputStreamCallback() {
-                                @Override
-                                public void process(final OutputStream out) throws IOException {
-                                    out.
-                                    write(replacementValue.getBytes(charset));
-                                }
-                            });
+                    flowFile = session.write(flowFile, new OutputStreamCallback() {
+                        @Override
+                        public void process(final OutputStream out) throws IOException {
+                            out.write(replacementValue.getBytes(charset));
+                        }
+                    });
                 } else {
                     flowFile = session.write(flowFile, new StreamCallback() {
                         @Override
@@ -271,19 +251,14 @@ public class ReplaceText extends AbstractProcessor {
                         }
                     });
                 }
-                session.getProvenanceReporter().
-                        modifyContent(flowFile, stopWatch.
-                                getElapsed(TimeUnit.MILLISECONDS));
+                session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
                 session.transfer(flowFile, REL_SUCCESS);
-                logger.
-                        info("Transferred {} to 'success'", new Object[]{flowFile});
+                logger.info("Transferred {} to 'success'", new Object[]{flowFile});
                 continue;
             }
 
             final StopWatch stopWatch = new StopWatch(true);
-            final String regex = context.getProperty(REGEX).
-                    evaluateAttributeExpressions(flowFile, quotedAttributeDecorator).
-                    getValue();
+            final String regex = context.getProperty(REGEX).evaluateAttributeExpressions(flowFile, quotedAttributeDecorator).getValue();
 
             if (evaluateMode.equalsIgnoreCase(ENTIRE_TEXT)) {
                 final int flowFileSize = (int) flowFile.getSize();
@@ -292,8 +267,7 @@ public class ReplaceText extends AbstractProcessor {
                     public void process(final InputStream in, final OutputStream out) throws IOException {
                         StreamUtils.fillBuffer(in, buffer, false);
                         final String contentString = new String(buffer, 0, flowFileSize, charset);
-                        final String updatedValue = contentString.
-                                replaceAll(regex, replacementValue);
+                        final String updatedValue = contentString.replaceAll(regex, replacementValue);
                         out.write(updatedValue.getBytes(charset));
                     }
                 });
@@ -305,8 +279,7 @@ public class ReplaceText extends AbstractProcessor {
                                 BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(out, charset));) {
                             String oneLine;
                             while (null != (oneLine = br.readLine())) {
-                                final String updatedValue = oneLine.
-                                        replaceAll(regex, replacementValue);
+                                final String updatedValue = oneLine.replaceAll(regex, replacementValue);
                                 bw.write(updatedValue);
                             }
                         }
@@ -315,9 +288,7 @@ public class ReplaceText extends AbstractProcessor {
             }
 
             logger.info("Transferred {} to 'success'", new Object[]{flowFile});
-            session.getProvenanceReporter().
-                    modifyContent(flowFile, stopWatch.
-                            getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
             session.transfer(flowFile, REL_SUCCESS);
         }
     }

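The onTrigger code above short-circuits when the Regular Expression is "(.*)" and the Replacement Value is "$1", and otherwise applies the substitution in either Entire Text or Line-by-Line mode. As a hedged usage sketch only (the regex, replacement and FlowFile content are invented; the property and relationship constants are the ones declared above), a mock-framework test could look like:

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.processors.standard.ReplaceText;
    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class ReplaceTextUsageSketch {

        @Test
        public void testReplacesEntireText() {
            final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
            runner.setProperty(ReplaceText.REGEX, "hello");
            runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "good-bye");
            // Evaluation Mode is left at its default, Entire Text

            runner.enqueue("hello world".getBytes(StandardCharsets.UTF_8));
            runner.run();

            runner.assertTransferCount(ReplaceText.REL_SUCCESS, 1);
            final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
            out.assertContentEquals("good-bye world");
        }
    }

With the default Evaluation Mode of Entire Text, the whole content is buffered and replaced in a single pass, which is why the assertion checks the complete rewritten string.
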
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
index a8a2919..5be2b69 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
@@ -71,62 +71,60 @@ import org.apache.commons.lang3.StringUtils;
 @CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of the content that matches the Regular Expression with some alternate value provided in a mapping file.")
 public class ReplaceTextWithMapping extends AbstractProcessor {
 
-    public static final PropertyDescriptor REGEX = new PropertyDescriptor.Builder().
-            name("Regular Expression").
-            description("The Regular Expression to search for in the FlowFile content").
-            required(true).
-            addValidator(StandardValidators.
-                    createRegexValidator(0, Integer.MAX_VALUE, true)).
-            expressionLanguageSupported(true).
-            defaultValue("\\S+").
-            build();
-    public static final PropertyDescriptor MATCHING_GROUP_FOR_LOOKUP_KEY = new PropertyDescriptor.Builder().
-            name("Matching Group").
-            description("The number of the matching group of the provided regex to replace with the corresponding value from the mapping file (if it exists).").
-            addValidator(StandardValidators.INTEGER_VALIDATOR).
-            required(true).
-            expressionLanguageSupported(true).
-            defaultValue("0").
-            build();
-    public static final PropertyDescriptor MAPPING_FILE = new PropertyDescriptor.Builder().
-            name("Mapping File").
-            description("The name of the file (including the full path) containing the Mappings.").
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            required(true).
-            build();
-    public static final PropertyDescriptor MAPPING_FILE_REFRESH_INTERVAL = new PropertyDescriptor.Builder().
-            name("Mapping File Refresh Interval").
-            description("The polling interval in seconds to check for updates to the mapping file. The default is 60s.").
-            addValidator(StandardValidators.TIME_PERIOD_VALIDATOR).
-            required(true).
-            defaultValue("60s").
-            build();
-    public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder().
-            name("Character Set").
-            description("The Character Set in which the file is encoded").
-            required(true).
-            addValidator(StandardValidators.CHARACTER_SET_VALIDATOR).
-            defaultValue("UTF-8").
-            build();
-    public static final PropertyDescriptor MAX_BUFFER_SIZE = new PropertyDescriptor.Builder().
-            name("Maximum Buffer Size").
-            description("Specifies the maximum amount of data to buffer (per file) in order to apply the regular expressions. If a FlowFile is larger than this value, the FlowFile will be routed to 'failure'").
-            required(true).
-            addValidator(StandardValidators.DATA_SIZE_VALIDATOR).
-            defaultValue("1 MB").
-            build();
-
-    public static final Relationship REL_SUCCESS = new Relationship.Builder().
-            name("success").
-            description("FlowFiles that have been successfully updated are routed to this relationship, as well as FlowFiles whose content does not match the given Regular Expression").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("FlowFiles that could not be updated are routed to this relationship").
-            build();
-
-    private final Pattern backReferencePattern = Pattern.
-            compile("[^\\\\]\\$(\\d+)");
+    public static final PropertyDescriptor REGEX = new PropertyDescriptor.Builder()
+            .name("Regular Expression")
+            .description("The Regular Expression to search for in the FlowFile content")
+            .required(true)
+            .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
+            .expressionLanguageSupported(true)
+            .defaultValue("\\S+")
+            .build();
+    public static final PropertyDescriptor MATCHING_GROUP_FOR_LOOKUP_KEY = new PropertyDescriptor.Builder()
+            .name("Matching Group")
+            .description("The number of the matching group of the provided regex to replace with the corresponding value from the mapping file (if it exists).")
+            .addValidator(StandardValidators.INTEGER_VALIDATOR)
+            .required(true)
+            .expressionLanguageSupported(true)
+            .defaultValue("0")
+            .build();
+    public static final PropertyDescriptor MAPPING_FILE = new PropertyDescriptor.Builder()
+            .name("Mapping File")
+            .description("The name of the file (including the full path) containing the Mappings.")
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .required(true)
+            .build();
+    public static final PropertyDescriptor MAPPING_FILE_REFRESH_INTERVAL = new PropertyDescriptor.Builder()
+            .name("Mapping File Refresh Interval")
+            .description("The polling interval in seconds to check for updates to the mapping file. The default is 60s.")
+            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+            .required(true)
+            .defaultValue("60s")
+            .build();
+    public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
+            .name("Character Set")
+            .description("The Character Set in which the file is encoded")
+            .required(true)
+            .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
+            .defaultValue("UTF-8")
+            .build();
+    public static final PropertyDescriptor MAX_BUFFER_SIZE = new PropertyDescriptor.Builder()
+            .name("Maximum Buffer Size")
+            .description("Specifies the maximum amount of data to buffer (per file) in order to apply the regular expressions. If a FlowFile is larger than this value, the FlowFile will be routed to 'failure'")
+            .required(true)
+            .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
+            .defaultValue("1 MB")
+            .build();
+
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("FlowFiles that have been successfully updated are routed to this relationship, as well as FlowFiles whose content does not match the given Regular Expression")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("FlowFiles that could not be updated are routed to this relationship")
+            .build();
+
+    private final Pattern backReferencePattern = Pattern.compile("[^\\\\]\\$(\\d+)");
 
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
@@ -134,31 +132,23 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
     private final ReentrantLock processorLock = new ReentrantLock();
     private final AtomicLong lastModified = new AtomicLong(0L);
     final AtomicLong mappingTestTime = new AtomicLong(0);
-    private final AtomicReference<ConfigurationState> configurationStateRef = new AtomicReference<>(
-            new ConfigurationState(null));
+    private final AtomicReference<ConfigurationState> configurationStateRef = new AtomicReference<>(new ConfigurationState(null));
 
     @Override
     protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> errors = new ArrayList<>(super.
-                customValidate(context));
-
-        final String regexValue = context.getProperty(REGEX).
-                evaluateAttributeExpressions().
-                getValue();
-        final int numCapturingGroups = Pattern.compile(regexValue).
-                matcher("").
-                groupCount();
-        final int groupToMatch = context.
-                getProperty(MATCHING_GROUP_FOR_LOOKUP_KEY).
-                evaluateAttributeExpressions().
-                asInteger();
+        final List<ValidationResult> errors = new ArrayList<>(super.customValidate(context));
+
+        final String regexValue = context.getProperty(REGEX).evaluateAttributeExpressions().getValue();
+        final int numCapturingGroups = Pattern.compile(regexValue).matcher("").groupCount();
+        final int groupToMatch = context.getProperty(MATCHING_GROUP_FOR_LOOKUP_KEY).evaluateAttributeExpressions().asInteger();
 
         if (groupToMatch > numCapturingGroups) {
-            errors.add(new ValidationResult.Builder().
-                    subject("Insufficient Matching Groups").
-                    valid(false).
-                    explanation("The specified matching group does not exist for the regular expression provided").
-                    build());
+            errors.add(
+                    new ValidationResult.Builder()
+                    .subject("Insufficient Matching Groups")
+                    .valid(false)
+                    .explanation("The specified matching group does not exist for the regular expression provided")
+                    .build());
         }
         return errors;
     }
@@ -200,9 +190,7 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
 
         final ProcessorLog logger = getLogger();
 
-        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).
-                asDataSize(DataUnit.B).
-                intValue();
+        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
 
         for (FlowFile flowFile : flowFiles) {
             if (flowFile.getSize() > maxBufferSize) {
@@ -212,13 +200,10 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
 
             final StopWatch stopWatch = new StopWatch(true);
 
-            flowFile = session.
-                    write(flowFile, new ReplaceTextCallback(context, flowFile, maxBufferSize));
+            flowFile = session.write(flowFile, new ReplaceTextCallback(context, flowFile, maxBufferSize));
 
             logger.info("Transferred {} to 'success'", new Object[]{flowFile});
-            session.getProvenanceReporter().
-                    modifyContent(flowFile, stopWatch.
-                            getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
             session.transfer(flowFile, REL_SUCCESS);
         }
     }
@@ -252,42 +237,33 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
                 // if not queried mapping file lastUpdate time in
                 // mapppingRefreshPeriodSecs, do so.
                 long currentTimeSecs = System.currentTimeMillis() / 1000;
-                long mappingRefreshPeriodSecs = context.
-                        getProperty(MAPPING_FILE_REFRESH_INTERVAL).
-                        asTimePeriod(TimeUnit.SECONDS);
+                long mappingRefreshPeriodSecs = context.getProperty(MAPPING_FILE_REFRESH_INTERVAL).asTimePeriod(TimeUnit.SECONDS);
 
                 boolean retry = (currentTimeSecs > (mappingTestTime.get() + mappingRefreshPeriodSecs));
                 if (retry) {
                     mappingTestTime.set(System.currentTimeMillis() / 1000);
                     // see if the mapping file needs to be reloaded
-                    final String fileName = context.getProperty(MAPPING_FILE).
-                            getValue();
+                    final String fileName = context.getProperty(MAPPING_FILE).getValue();
                     final File file = new File(fileName);
                     if (file.exists() && file.isFile() && file.canRead()) {
                         if (file.lastModified() > lastModified.get()) {
                             lastModified.getAndSet(file.lastModified());
                             try (FileInputStream is = new FileInputStream(file)) {
-                                logger.
-                                        info("Reloading mapping file: {}", new Object[]{fileName});
+                                logger.info("Reloading mapping file: {}", new Object[]{fileName});
 
                                 final Map<String, String> mapping = loadMappingFile(is);
-                                final ConfigurationState newState = new ConfigurationState(
-                                        mapping);
+                                final ConfigurationState newState = new ConfigurationState(mapping);
                                 configurationStateRef.set(newState);
                             } catch (IOException e) {
-                                logger.
-                                        error("Error reading mapping file: {}", new Object[]{e.
-                                            getMessage()});
+                                logger.error("Error reading mapping file: {}", new Object[]{e.getMessage()});
                             }
                         }
                     } else {
-                        logger.
-                                error("Mapping file does not exist or is not readable: {}", new Object[]{fileName});
+                        logger.error("Mapping file does not exist or is not readable: {}", new Object[]{fileName});
                     }
                 }
             } catch (Exception e) {
-                logger.error("Error loading mapping file: {}", new Object[]{e.
-                    getMessage()});
+                logger.error("Error loading mapping file: {}", new Object[]{e.getMessage()});
             } finally {
                 processorLock.unlock();
             }
@@ -354,34 +330,23 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
         };
 
         private ReplaceTextCallback(ProcessContext context, FlowFile flowFile, int maxBufferSize) {
-            this.regex = context.getProperty(REGEX).
-                    evaluateAttributeExpressions(flowFile, quotedAttributeDecorator).
-                    getValue();
+            this.regex = context.getProperty(REGEX).evaluateAttributeExpressions(flowFile, quotedAttributeDecorator).getValue();
             this.flowFile = flowFile;
 
-            this.charset = Charset.forName(context.getProperty(CHARACTER_SET).
-                    getValue());
+            this.charset = Charset.forName(context.getProperty(CHARACTER_SET).getValue());
 
-            final String regexValue = context.getProperty(REGEX).
-                    evaluateAttributeExpressions().
-                    getValue();
-            this.numCapturingGroups = Pattern.compile(regexValue).
-                    matcher("").
-                    groupCount();
+            final String regexValue = context.getProperty(REGEX).evaluateAttributeExpressions().getValue();
+            this.numCapturingGroups = Pattern.compile(regexValue).matcher("").groupCount();
 
             this.buffer = new byte[maxBufferSize];
 
-            this.groupToMatch = context.
-                    getProperty(MATCHING_GROUP_FOR_LOOKUP_KEY).
-                    evaluateAttributeExpressions().
-                    asInteger();
+            this.groupToMatch = context.getProperty(MATCHING_GROUP_FOR_LOOKUP_KEY).evaluateAttributeExpressions().asInteger();
         }
 
         @Override
         public void process(final InputStream in, final OutputStream out) throws IOException {
 
-            final Map<String, String> mapping = configurationStateRef.get().
-                    getMapping();
+            final Map<String, String> mapping = configurationStateRef.get().getMapping();
 
             StreamUtils.fillBuffer(in, buffer, false);
 
@@ -389,8 +354,7 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
 
             final String contentString = new String(buffer, 0, flowFileSize, charset);
 
-            final Matcher matcher = Pattern.compile(regex).
-                    matcher(contentString);
+            final Matcher matcher = Pattern.compile(regex).matcher(contentString);
 
             matcher.reset();
             boolean result = matcher.find();
@@ -401,37 +365,26 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
                     String rv = mapping.get(matched);
 
                     if (rv == null) {
-                        String replacement = matcher.group().
-                                replace("$", "\\$");
+                        String replacement = matcher.group().replace("$", "\\$");
                         matcher.appendReplacement(sb, replacement);
                     } else {
                         String allRegexMatched = matcher.group(); //this is everything that matched the regex
 
-                        int scaledStart = matcher.start(groupToMatch) - matcher.
-                                start();
-                        int scaledEnd = scaledStart + matcher.
-                                group(groupToMatch).
-                                length();
+                        int scaledStart = matcher.start(groupToMatch) - matcher.start();
+                        int scaledEnd = scaledStart + matcher.group(groupToMatch).length();
 
                         StringBuilder replacementBuilder = new StringBuilder();
 
-                        replacementBuilder.append(allRegexMatched.
-                                substring(0, scaledStart).
-                                replace("$", "\\$"));
-                        replacementBuilder.
-                                append(fillReplacementValueBackReferences(rv, numCapturingGroups));
-                        replacementBuilder.append(allRegexMatched.
-                                substring(scaledEnd).
-                                replace("$", "\\$"));
-
-                        matcher.appendReplacement(sb, replacementBuilder.
-                                toString());
+                        replacementBuilder.append(allRegexMatched.substring(0, scaledStart).replace("$", "\\$"));
+                        replacementBuilder.append(fillReplacementValueBackReferences(rv, numCapturingGroups));
+                        replacementBuilder.append(allRegexMatched.substring(scaledEnd).replace("$", "\\$"));
+
+                        matcher.appendReplacement(sb, replacementBuilder.toString());
                     }
                     result = matcher.find();
                 } while (result);
                 matcher.appendTail(sb);
-                out.write(sb.toString().
-                        getBytes(charset));
+                out.write(sb.toString().getBytes(charset));
                 return;
             }
             out.write(contentString.getBytes(charset));

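The ReplaceTextCallback above finds each regex match, looks the matched group up in the in-memory mapping, and rebuilds the content with Matcher.appendReplacement, copying unmapped tokens through unchanged. A stripped-down sketch of that lookup-and-replace loop for the default Matching Group of 0, using plain JDK types (the sample content, regex and mapping are invented, and the back-reference filling and group-offset handling of the real callback are omitted):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class MappingReplaceSketch {

        public static String replaceWithMapping(final String content, final String regex, final Map<String, String> mapping) {
            final Matcher matcher = Pattern.compile(regex).matcher(content);
            final StringBuffer sb = new StringBuffer();
            while (matcher.find()) {
                final String matched = matcher.group();
                final String mapped = mapping.get(matched);
                // tokens without a mapping are copied through; '$' must be escaped for appendReplacement
                final String replacement = (mapped == null ? matched : mapped).replace("$", "\\$");
                matcher.appendReplacement(sb, replacement);
            }
            matcher.appendTail(sb);
            return sb.toString();
        }

        public static void main(final String[] args) {
            final Map<String, String> mapping = new HashMap<>();
            mapping.put("cat", "dog");
            mapping.put("mat", "rug");
            System.out.println(replaceWithMapping("the cat sat on the mat", "\\S+", mapping));
            // prints: the dog sat on the rug
        }
    }
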
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
index ff231d7..8b6a7b4 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
@@ -48,16 +48,10 @@ import org.apache.nifi.processor.util.StandardValidators;
 
 /**
  * <p>
- * This processor routes a FlowFile based on its flow file attributes by using
- * the Attribute Expression Language. The Expression Language is used by adding
- * Optional Properties to the processor. The name of the Property indicates the
- * name of the relationship to which a FlowFile will be routed if matched. The
- * value of the Property indicates an Attribute Expression Language Expression
- * that will be used to determine whether or not a given FlowFile will be routed
- * to the associated relationship. If multiple expressions match a FlowFile's
- * attributes, that FlowFile will be cloned and routed to each corresponding
- * relationship. If none of the supplied expressions matches for a given
- * FlowFile, that FlowFile will be routed to the 'unmatched' relationship.
+ * This processor routes a FlowFile based on its flow file attributes by using the Attribute Expression Language. The Expression Language is used by adding Optional Properties to the processor. The
+ * name of the Property indicates the name of the relationship to which a FlowFile will be routed if matched. The value of the Property indicates an Attribute Expression Language Expression that will
+ * be used to determine whether or not a given FlowFile will be routed to the associated relationship. If multiple expressions match a FlowFile's attributes, that FlowFile will be cloned and routed to
+ * each corresponding relationship. If none of the supplied expressions matches for a given FlowFile, that FlowFile will be routed to the 'unmatched' relationship.
  * </p>
  *
  * @author unattributed
@@ -79,43 +73,34 @@ public class RouteOnAttribute extends AbstractProcessor {
     private static final String routeAnyMatches = "Route to 'match' if any matches";
     private static final String routePropertyNameValue = "Route to Property name";
 
-    public static final AllowableValue ROUTE_PROPERTY_NAME = new AllowableValue(
-            routePropertyNameValue,
-            "Route to Property name",
-            "A copy of the FlowFile will be routed to each relationship whose corresponding expression evaluates to 'true'"
-    );
-    public static final AllowableValue ROUTE_ALL_MATCH = new AllowableValue(
-            routeAllMatchValue,
-            "Route to 'matched' if all match",
-            "Requires that all user-defined expressions evaluate to 'true' for the FlowFile to be considered a match"
-    );
-    public static final AllowableValue ROUTE_ANY_MATCHES = new AllowableValue(
-            routeAnyMatches, // keep the word 'match' instead of 'matched' to maintain backward compatibility (there was a typo originally)
+    public static final AllowableValue ROUTE_PROPERTY_NAME = new AllowableValue(routePropertyNameValue, "Route to Property name",
+            "A copy of the FlowFile will be routed to each relationship whose corresponding expression evaluates to 'true'");
+    public static final AllowableValue ROUTE_ALL_MATCH = new AllowableValue(routeAllMatchValue, "Route to 'matched' if all match",
+            "Requires that all user-defined expressions evaluate to 'true' for the FlowFile to be considered a match");
+    public static final AllowableValue ROUTE_ANY_MATCHES = new AllowableValue(routeAnyMatches, // keep the word 'match' instead of 'matched' to maintain backward compatibility (there was a typo originally)
             "Route to 'matched' if any matches",
-            "Requires that at least one user-defined expression evaluate to 'true' for hte FlowFile to be considered a match"
-    );
+            "Requires that at least one user-defined expression evaluate to 'true' for the FlowFile to be considered a match");
 
-    public static final PropertyDescriptor ROUTE_STRATEGY = new PropertyDescriptor.Builder().
-            name("Routing Strategy").
-            description("Specifies how to determine which relationship to use when evaluating the Expression Language").
-            required(true).
-            allowableValues(ROUTE_PROPERTY_NAME, ROUTE_ALL_MATCH, ROUTE_ANY_MATCHES).
-            defaultValue(ROUTE_PROPERTY_NAME.getValue()).
-            build();
+    public static final PropertyDescriptor ROUTE_STRATEGY = new PropertyDescriptor.Builder()
+            .name("Routing Strategy")
+            .description("Specifies how to determine which relationship to use when evaluating the Expression Language")
+            .required(true)
+            .allowableValues(ROUTE_PROPERTY_NAME, ROUTE_ALL_MATCH, ROUTE_ANY_MATCHES)
+            .defaultValue(ROUTE_PROPERTY_NAME.getValue())
+            .build();
 
     public static final Relationship REL_NO_MATCH = new Relationship.Builder()
-            .name("unmatched").
-            description("FlowFiles that do not match any user-define expression will be routed here").
-            build();
+            .name("unmatched")
+            .description("FlowFiles that do not match any user-defined expression will be routed here")
+            .build();
     public static final Relationship REL_MATCH = new Relationship.Builder()
-            .name("matched").
-            description("FlowFiles will be routed to 'match' if one or all Expressions match, depending on the configuration of the Routing Strategy property").
-            build();
+            .name("matched")
+            .description("FlowFiles will be routed to 'match' if one or all Expressions match, depending on the configuration of the Routing Strategy property")
+            .build();
 
     private AtomicReference<Set<Relationship>> relationships = new AtomicReference<>();
     private List<PropertyDescriptor> properties;
-    private volatile String configuredRouteStrategy = ROUTE_STRATEGY.
-            getDefaultValue();
+    private volatile String configuredRouteStrategy = ROUTE_STRATEGY.getDefaultValue();
     private volatile Set<String> dynamicPropertyNames = new HashSet<>();
 
     @Override
@@ -142,13 +127,12 @@ public class RouteOnAttribute extends AbstractProcessor {
     @Override
     protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
         return new PropertyDescriptor.Builder()
-                .required(false).
-                name(propertyDescriptorName).
-                addValidator(StandardValidators.
-                        createAttributeExpressionLanguageValidator(ResultType.BOOLEAN, false)).
-                dynamic(true).
-                expressionLanguageSupported(true).
-                build();
+                .required(false)
+                .name(propertyDescriptorName)
+                .addValidator(StandardValidators.createAttributeExpressionLanguageValidator(ResultType.BOOLEAN, false))
+                .dynamic(true)
+                .expressionLanguageSupported(true)
+                .build();
     }
 
     @Override
@@ -163,8 +147,7 @@ public class RouteOnAttribute extends AbstractProcessor {
                 newDynamicPropertyNames.add(descriptor.getName());
             }
 
-            this.dynamicPropertyNames = Collections.
-                    unmodifiableSet(newDynamicPropertyNames);
+            this.dynamicPropertyNames = Collections.unmodifiableSet(newDynamicPropertyNames);
         }
 
         // formulate the new set of Relationships
@@ -173,8 +156,7 @@ public class RouteOnAttribute extends AbstractProcessor {
         final String routeStrategy = configuredRouteStrategy;
         if (ROUTE_PROPERTY_NAME.equals(routeStrategy)) {
             for (final String propName : allDynamicProps) {
-                newRelationships.add(new Relationship.Builder().name(propName).
-                        build());
+                newRelationships.add(new Relationship.Builder().name(propName).build());
             }
         } else {
             newRelationships.add(REL_MATCH);
@@ -193,32 +175,26 @@ public class RouteOnAttribute extends AbstractProcessor {
 
         final ProcessorLog logger = getLogger();
         final Map<Relationship, PropertyValue> propertyMap = new HashMap<>();
-        for (final PropertyDescriptor descriptor : context.getProperties().
-                keySet()) {
+        for (final PropertyDescriptor descriptor : context.getProperties().keySet()) {
             if (!descriptor.isDynamic()) {
                 continue;
             }
 
-            propertyMap.put(new Relationship.Builder().
-                    name(descriptor.getName()).
-                    build(), context.getProperty(descriptor));
+            propertyMap.put(new Relationship.Builder().name(descriptor.getName()).build(), context.getProperty(descriptor));
         }
 
         final Set<Relationship> matchingRelationships = new HashSet<>();
-        for (final Map.Entry<Relationship, PropertyValue> entry : propertyMap.
-                entrySet()) {
+        for (final Map.Entry<Relationship, PropertyValue> entry : propertyMap.entrySet()) {
             final PropertyValue value = entry.getValue();
 
-            final boolean matches = value.evaluateAttributeExpressions(flowFile).
-                    asBoolean();
+            final boolean matches = value.evaluateAttributeExpressions(flowFile).asBoolean();
             if (matches) {
                 matchingRelationships.add(entry.getKey());
             }
         }
 
         final Set<Relationship> destinationRelationships = new HashSet<>();
-        switch (context.getProperty(ROUTE_STRATEGY).
-                getValue()) {
+        switch (context.getProperty(ROUTE_STRATEGY).getValue()) {
             case routeAllMatchValue:
                 if (matchingRelationships.size() == propertyMap.size()) {
                     destinationRelationships.add(REL_MATCH);
@@ -241,52 +217,36 @@ public class RouteOnAttribute extends AbstractProcessor {
 
         if (destinationRelationships.isEmpty()) {
             logger.info(this + " routing " + flowFile + " to unmatched");
-            flowFile = session.
-                    putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, REL_NO_MATCH.
-                            getName());
-            session.getProvenanceReporter().
-                    route(flowFile, REL_NO_MATCH);
+            flowFile = session.putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, REL_NO_MATCH.getName());
+            session.getProvenanceReporter().route(flowFile, REL_NO_MATCH);
             session.transfer(flowFile, REL_NO_MATCH);
         } else {
-            final Iterator<Relationship> relationshipNameIterator = destinationRelationships.
-                    iterator();
-            final Relationship firstRelationship = relationshipNameIterator.
-                    next();
+            final Iterator<Relationship> relationshipNameIterator = destinationRelationships.iterator();
+            final Relationship firstRelationship = relationshipNameIterator.next();
             final Map<Relationship, FlowFile> transferMap = new HashMap<>();
             final Set<FlowFile> clones = new HashSet<>();
 
             // make all the clones for any remaining relationships
             while (relationshipNameIterator.hasNext()) {
-                final Relationship relationship = relationshipNameIterator.
-                        next();
+                final Relationship relationship = relationshipNameIterator.next();
                 final FlowFile cloneFlowFile = session.clone(flowFile);
                 clones.add(cloneFlowFile);
                 transferMap.put(relationship, cloneFlowFile);
             }
 
             // now transfer any clones generated
-            for (final Map.Entry<Relationship, FlowFile> entry : transferMap.
-                    entrySet()) {
-                logger.info(this + " cloned " + flowFile + " into " + entry.
-                        getValue() + " and routing clone to relationship " + entry.
-                        getKey());
-                FlowFile updatedFlowFile = session.
-                        putAttribute(entry.getValue(), ROUTE_ATTRIBUTE_KEY, entry.
-                                getKey().
-                                getName());
-                session.getProvenanceReporter().
-                        route(updatedFlowFile, entry.getKey());
+            for (final Map.Entry<Relationship, FlowFile> entry : transferMap.entrySet()) {
+                logger.info(this + " cloned " + flowFile + " into " + entry.getValue() + " and routing clone to relationship " + entry.getKey());
+                FlowFile updatedFlowFile = session.putAttribute(entry.getValue(), ROUTE_ATTRIBUTE_KEY, entry.getKey().getName());
+                session.getProvenanceReporter().route(updatedFlowFile, entry.getKey());
                 session.transfer(updatedFlowFile, entry.getKey());
             }
 
             //now transfer the original flow file
             logger.
                     info("Routing {} to {}", new Object[]{flowFile, firstRelationship});
-            session.getProvenanceReporter().
-                    route(flowFile, firstRelationship);
-            flowFile = session.
-                    putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, firstRelationship.
-                            getName());
+            session.getProvenanceReporter().route(flowFile, firstRelationship);
+            flowFile = session.putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, firstRelationship.getName());
             session.transfer(flowFile, firstRelationship);
         }
     }

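The class Javadoc reflowed above describes routing by Expression Language expressions supplied as dynamic properties; under the default 'Route to Property name' strategy each dynamic property name becomes its own relationship. A usage sketch only (the 'coffee' property name, the 'beverage' attribute and its value are invented for illustration):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.nifi.processors.standard.RouteOnAttribute;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class RouteOnAttributeUsageSketch {

        @Test
        public void testRoutesToDynamicPropertyRelationship() {
            final TestRunner runner = TestRunners.newTestRunner(new RouteOnAttribute());
            // each dynamic property becomes a relationship under the default routing strategy
            runner.setProperty("coffee", "${beverage:equals('coffee')}");

            final Map<String, String> attributes = new HashMap<>();
            attributes.put("beverage", "coffee");
            runner.enqueue(new byte[0], attributes);
            runner.run();

            runner.assertTransferCount("coffee", 1);
            runner.assertTransferCount(RouteOnAttribute.REL_NO_MATCH, 0);
        }
    }
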
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
index 8f1eb4e..937bc69 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
@@ -70,34 +70,34 @@ public class RouteOnContent extends AbstractProcessor {
     public static final String MATCH_ALL = "content must match exactly";
     public static final String MATCH_SUBSEQUENCE = "content must contain match";
 
-    public static final PropertyDescriptor BUFFER_SIZE = new PropertyDescriptor.Builder().
-            name("Content Buffer Size").
-            description("Specifies the maximum amount of data to buffer in order to apply the regular expressions. If the size of the FlowFile "
-                    + "exceeds this value, any amount of this value will be ignored").
-            required(true).
-            addValidator(StandardValidators.DATA_SIZE_VALIDATOR).
-            defaultValue("1 MB").
-            build();
-    public static final PropertyDescriptor MATCH_REQUIREMENT = new PropertyDescriptor.Builder().
-            name("Match Requirement").
-            description("Specifies whether the entire content of the file must match the regular expression exactly, or if any part of the file "
-                    + "(up to Content Buffer Size) can contain the regular expression in order to be considered a match").
-            required(true).
-            allowableValues(MATCH_ALL, MATCH_SUBSEQUENCE).
-            defaultValue(MATCH_ALL).
-            build();
-    public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder().
-            name("Character Set").
-            description("The Character Set in which the file is encoded").
-            required(true).
-            addValidator(StandardValidators.CHARACTER_SET_VALIDATOR).
-            defaultValue("UTF-8").
-            build();
-
-    public static final Relationship REL_NO_MATCH = new Relationship.Builder().
-            name("unmatched").
-            description("FlowFiles that do not match any of the user-supplied regular expressions will be routed to this relationship").
-            build();
+    public static final PropertyDescriptor BUFFER_SIZE = new PropertyDescriptor.Builder()
+            .name("Content Buffer Size")
+            .description("Specifies the maximum amount of data to buffer in order to apply the regular expressions. If the size of the FlowFile "
+                    + "exceeds this value, any amount of this value will be ignored")
+            .required(true)
+            .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
+            .defaultValue("1 MB")
+            .build();
+    public static final PropertyDescriptor MATCH_REQUIREMENT = new PropertyDescriptor.Builder()
+            .name("Match Requirement")
+            .description("Specifies whether the entire content of the file must match the regular expression exactly, or if any part of the file "
+                    + "(up to Content Buffer Size) can contain the regular expression in order to be considered a match")
+            .required(true)
+            .allowableValues(MATCH_ALL, MATCH_SUBSEQUENCE)
+            .defaultValue(MATCH_ALL)
+            .build();
+    public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
+            .name("Character Set")
+            .description("The Character Set in which the file is encoded")
+            .required(true)
+            .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
+            .defaultValue("UTF-8")
+            .build();
+
+    public static final Relationship REL_NO_MATCH = new Relationship.Builder()
+            .name("unmatched")
+            .description("FlowFiles that do not match any of the user-supplied regular expressions will be routed to this relationship")
+            .build();
 
     private final AtomicReference<Set<Relationship>> relationships = new AtomicReference<>();
     private List<PropertyDescriptor> properties;
@@ -132,23 +132,19 @@ public class RouteOnContent extends AbstractProcessor {
         }
 
         return new PropertyDescriptor.Builder()
-                .required(false).
-                name(propertyDescriptorName).
-                addValidator(StandardValidators.
-                        createRegexValidator(0, Integer.MAX_VALUE, true)).
-                dynamic(true).
-                expressionLanguageSupported(true).
-                build();
+                .required(false)
+                .name(propertyDescriptorName)
+                .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
+                .dynamic(true)
+                .expressionLanguageSupported(true)
+                .build();
     }
 
     @Override
     public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
         if (descriptor.isDynamic()) {
-            final Set<Relationship> relationships = new HashSet<>(this.relationships.
-                    get());
-            final Relationship relationship = new Relationship.Builder().
-                    name(descriptor.getName()).
-                    build();
+            final Set<Relationship> relationships = new HashSet<>(this.relationships.get());
+            final Relationship relationship = new Relationship.Builder().name(descriptor.getName()).build();
 
             if (newValue == null) {
                 relationships.remove(relationship);
@@ -170,20 +166,15 @@ public class RouteOnContent extends AbstractProcessor {
         final AttributeValueDecorator quoteDecorator = new AttributeValueDecorator() {
             @Override
             public String decorate(final String attributeValue) {
-                return (attributeValue == null) ? null : Pattern.
-                        quote(attributeValue);
+                return (attributeValue == null) ? null : Pattern.quote(attributeValue);
             }
         };
 
         final Map<FlowFile, Set<Relationship>> flowFileDestinationMap = new HashMap<>();
         final ProcessorLog logger = getLogger();
 
-        final Charset charset = Charset.forName(context.
-                getProperty(CHARACTER_SET).
-                getValue());
-        final byte[] buffer = new byte[context.getProperty(BUFFER_SIZE).
-                asDataSize(DataUnit.B).
-                intValue()];
+        final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).getValue());
+        final byte[] buffer = new byte[context.getProperty(BUFFER_SIZE).asDataSize(DataUnit.B).intValue()];
         for (final FlowFile flowFile : flowFiles) {
             final Set<Relationship> destinations = new HashSet<>();
             flowFileDestinationMap.put(flowFile, destinations);
@@ -192,82 +183,58 @@ public class RouteOnContent extends AbstractProcessor {
             session.read(flowFile, new InputStreamCallback() {
                 @Override
                 public void process(final InputStream in) throws IOException {
-                    bufferedByteCount.set(StreamUtils.
-                            fillBuffer(in, buffer, false));
+                    bufferedByteCount.set(StreamUtils.fillBuffer(in, buffer, false));
                 }
             });
 
-            final String contentString = new String(buffer, 0, bufferedByteCount.
-                    get(), charset);
+            final String contentString = new String(buffer, 0, bufferedByteCount.get(), charset);
 
-            for (final PropertyDescriptor descriptor : context.getProperties().
-                    keySet()) {
+            for (final PropertyDescriptor descriptor : context.getProperties().keySet()) {
                 if (!descriptor.isDynamic()) {
                     continue;
                 }
 
-                final String regex = context.getProperty(descriptor).
-                        evaluateAttributeExpressions(flowFile, quoteDecorator).
-                        getValue();
+                final String regex = context.getProperty(descriptor).evaluateAttributeExpressions(flowFile, quoteDecorator).getValue();
                 final Pattern pattern = Pattern.compile(regex);
                 final boolean matches;
-                if (context.getProperty(MATCH_REQUIREMENT).
-                        getValue().
-                        equalsIgnoreCase(MATCH_ALL)) {
-                    matches = pattern.matcher(contentString).
-                            matches();
+                if (context.getProperty(MATCH_REQUIREMENT).getValue().equalsIgnoreCase(MATCH_ALL)) {
+                    matches = pattern.matcher(contentString).matches();
                 } else {
-                    matches = pattern.matcher(contentString).
-                            find();
+                    matches = pattern.matcher(contentString).find();
                 }
 
                 if (matches) {
-                    final Relationship relationship = new Relationship.Builder().
-                            name(descriptor.getName()).
-                            build();
+                    final Relationship relationship = new Relationship.Builder().name(descriptor.getName()).build();
                     destinations.add(relationship);
                 }
             }
         }
 
-        for (final Map.Entry<FlowFile, Set<Relationship>> entry : flowFileDestinationMap.
-                entrySet()) {
+        for (final Map.Entry<FlowFile, Set<Relationship>> entry : flowFileDestinationMap.entrySet()) {
             FlowFile flowFile = entry.getKey();
             final Set<Relationship> destinations = entry.getValue();
 
             if (destinations.isEmpty()) {
-                flowFile = session.
-                        putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, REL_NO_MATCH.
-                                getName());
+                flowFile = session.putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, REL_NO_MATCH.getName());
                 session.transfer(flowFile, REL_NO_MATCH);
-                session.getProvenanceReporter().
-                        route(flowFile, REL_NO_MATCH);
+                session.getProvenanceReporter().route(flowFile, REL_NO_MATCH);
                 logger.info("Routing {} to 'unmatched'", new Object[]{flowFile});
             } else {
-                final Relationship firstRelationship = destinations.iterator().
-                        next();
+                final Relationship firstRelationship = destinations.iterator().next();
                 destinations.remove(firstRelationship);
 
                 for (final Relationship relationship : destinations) {
                     FlowFile clone = session.clone(flowFile);
-                    clone = session.
-                            putAttribute(clone, ROUTE_ATTRIBUTE_KEY, relationship.
-                                    getName());
-                    session.getProvenanceReporter().
-                            route(clone, relationship);
+                    clone = session.putAttribute(clone, ROUTE_ATTRIBUTE_KEY, relationship.getName());
+                    session.getProvenanceReporter().route(clone, relationship);
                     session.transfer(clone, relationship);
-                    logger.
-                            info("Cloning {} to {} and routing clone to {}", new Object[]{flowFile, clone, relationship});
+                    logger.info("Cloning {} to {} and routing clone to {}", new Object[]{flowFile, clone, relationship});
                 }
 
-                flowFile = session.
-                        putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, firstRelationship.
-                                getName());
-                session.getProvenanceReporter().
-                        route(flowFile, firstRelationship);
+                flowFile = session.putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, firstRelationship.getName());
+                session.getProvenanceReporter().route(flowFile, firstRelationship);
                 session.transfer(flowFile, firstRelationship);
-                logger.
-                        info("Routing {} to {}", new Object[]{flowFile, firstRelationship});
+                logger.info("Routing {} to {}", new Object[]{flowFile, firstRelationship});
             }
         }
     }


[43/50] [abbrv] incubator-nifi git commit: NIFI-271 findbugs is now available by activating the findbugs profile. Not making required because we need to work down a lot of warnings

Posted by mc...@apache.org.
NIFI-271 findbugs is now available by activating the findbugs profile. It is not yet required because we need to work down a lot of warnings.
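
The profile is opt-in; per the pom comment below ("Activate with -Pfindbugs"), a typical invocation would look like the following (a sketch only, assuming a full build from the reactor root; the exact goals are up to the builder):

    mvn clean install -Pfindbugs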


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/6c3256e0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/6c3256e0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/6c3256e0

Branch: refs/heads/NIFI-292
Commit: 6c3256e00a2c10f6968aef65cfec90557f8cfab2
Parents: e627482
Author: joewitt <jo...@apache.org>
Authored: Tue Apr 28 01:37:38 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Tue Apr 28 01:37:38 2015 -0400

----------------------------------------------------------------------
 nifi-nar-maven-plugin/pom.xml |  7 +++++++
 nifi-parent/pom.xml           | 34 ++++++++++++++++++++++++++++++++--
 2 files changed, 39 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6c3256e0/nifi-nar-maven-plugin/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-maven-plugin/pom.xml b/nifi-nar-maven-plugin/pom.xml
index 46dd333..765cbdf 100644
--- a/nifi-nar-maven-plugin/pom.xml
+++ b/nifi-nar-maven-plugin/pom.xml
@@ -66,6 +66,13 @@
                     </execution>
                 </executions>
             </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-checkstyle-plugin</artifactId>
+                <configuration>
+                    <excludes>**/HelpMojo.java</excludes>
+                </configuration>
+            </plugin>            
         </plugins>        
     </build>
     <dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6c3256e0/nifi-parent/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-parent/pom.xml b/nifi-parent/pom.xml
index dbc31df..6fcf364 100644
--- a/nifi-parent/pom.xml
+++ b/nifi-parent/pom.xml
@@ -205,7 +205,18 @@
                             <version>6.5</version>
                         </dependency>
                     </dependencies>
-                </plugin>                
+                </plugin>
+                <plugin>
+                    <groupId>org.codehaus.mojo</groupId>
+                    <artifactId>findbugs-maven-plugin</artifactId>
+                    <version>3.0.1</version>
+                    <configuration>
+                        <xmlOutput>true</xmlOutput>
+                        <effort>Max</effort>
+                        <failOnError>true</failOnError>
+                        <maxRank>14</maxRank>
+                    </configuration>
+                </plugin>                              
             </plugins>
         </pluginManagement>
         <plugins>
@@ -335,7 +346,6 @@
                     </checkstyleRules>
                     <violationSeverity>warning</violationSeverity>
                     <includeTestSourceDirectory>true</includeTestSourceDirectory>
-                    <excludes>**/HelpMojo.java,**/generated-sources</excludes>
                 </configuration>
             </plugin>      
             <plugin>
@@ -387,5 +397,25 @@
                 </plugins>
             </build>
         </profile>
+        <profile>
+            <!-- Run findbugs.  Activate with -Pfindbugs -->
+            <id>findbugs</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>findbugs-maven-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>run-findbugs</id>
+                                <goals>
+                                    <goal>check</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>                  
+                </plugins>
+            </build>
+        </profile>
     </profiles>
 </project>


[48/50] [abbrv] incubator-nifi git commit: NIFI-292: - Continuing to annotate endpoints using swagger.

Posted by mc...@apache.org.
NIFI-292:
- Continuing to annotate endpoints using swagger.
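
For context, the hunks below layer com.wordnik.swagger.annotations on top of the existing JAX-RS and Spring Security annotations of each endpoint. A minimal sketch of the resulting shape, assembled from the ConnectionResource hunks (imports, class declaration, and the method body are elided, and the @ApiOperation wording is illustrative rather than quoted from the commit):

    @GET
    @Consumes(MediaType.WILDCARD)
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    @Path("") // necessary due to bug in swagger
    @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
    @TypeHint(ConnectionsEntity.class)
    @ApiOperation(
            value = "Gets all the connections"
    )
    public Response getConnections(
            @ApiParam(
                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                    required = false
            )
            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
        // ... query the service facade and return a ConnectionsEntity (body elided)
    }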

Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/9bdc752a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/9bdc752a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/9bdc752a

Branch: refs/heads/NIFI-292
Commit: 9bdc752aa8964787d6e72c7a74a2cc6826577d2c
Parents: 180534b
Author: Matt Gilman <ma...@gmail.com>
Authored: Tue Apr 28 09:28:01 2015 -0400
Committer: Matt Gilman <ma...@gmail.com>
Committed: Tue Apr 28 09:28:01 2015 -0400

----------------------------------------------------------------------
 .../nifi/web/api/BulletinBoardResource.java     |   1 -
 .../apache/nifi/web/api/ClusterResource.java    | 131 +++---
 .../apache/nifi/web/api/ConnectionResource.java |  93 +++--
 .../apache/nifi/web/api/ControllerResource.java | 124 ++++--
 .../nifi/web/api/ControllerServiceResource.java | 394 ++++++++++++++++---
 .../org/apache/nifi/web/api/FunnelResource.java | 167 +++++++-
 .../apache/nifi/web/api/InputPortResource.java  | 138 ++++++-
 7 files changed, 856 insertions(+), 192 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9bdc752a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
index d496ed7..6cc1c8a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
@@ -20,7 +20,6 @@ import com.wordnik.swagger.annotations.Api;
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
-import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9bdc752a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
index 7d76179..a937467 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
@@ -111,7 +111,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the status of this NiFi cluster.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A clusterStatusEntity
      */
     @GET
@@ -139,7 +141,7 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getClusterStatus(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -182,9 +184,12 @@ public class ClusterResource extends ApplicationResource {
     }
 
     /**
-     * Gets the contents of this NiFi cluster. This includes all nodes and their status.
+     * Gets the contents of this NiFi cluster. This includes all nodes and their
+     * status.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A clusterEntity
      */
     @GET
@@ -212,7 +217,7 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getCluster(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -269,7 +274,7 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response searchCluster(
             @ApiParam(
-                    value = "Node address to search for",
+                    value = "Node address to search for.",
                     required = true
             )
             @QueryParam("q") @DefaultValue(StringUtils.EMPTY) String value) {
@@ -314,7 +319,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the processor.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the processor
      * @return A processorEntity
      */
@@ -344,16 +351,16 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getProcessor(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The processor id",
+                    value = "The processor id.",
                     required = true
             )
             @PathParam("id") String id) {
-        
+
         if (!properties.isClusterManager()) {
 
             final ProcessorDTO dto = serviceFacade.getProcessor(id);
@@ -378,8 +385,11 @@ public class ClusterResource extends ApplicationResource {
      * Updates the processors annotation data.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param processorId The id of the processor.
      * @param annotationData The annotation data to set.
      * @return A processorEntity.
@@ -462,7 +472,7 @@ public class ClusterResource extends ApplicationResource {
     public Response updateProcessor(
             @Context HttpServletRequest httpServletRequest,
             @ApiParam(
-                    value = "The processor id",
+                    value = "The processor id.",
                     required = true
             )
             @PathParam("id") final String processorId,
@@ -529,7 +539,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the processor status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the processor
      * @return A clusterProcessorStatusEntity
      */
@@ -559,10 +571,10 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getProcessorStatus(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
                     value = "The processor id",
                     required = true
@@ -592,7 +604,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the processor status history for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the processor
      * @return A clusterProcessorStatusHistoryEntity
      */
@@ -622,7 +636,7 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getProcessorStatusHistory(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
@@ -654,7 +668,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the connection status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the processor
      * @return A clusterProcessorStatusEntity
      */
@@ -684,10 +700,10 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getConnectionStatus(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
                     value = "The connection id",
                     required = true
@@ -717,7 +733,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the connections status history for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the processor
      * @return A clusterProcessorStatusHistoryEntity
      */
@@ -747,12 +765,12 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getConnectionStatusHistory(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The connection id",
+                    value = "The connection id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -779,7 +797,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the process group status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the process group
      * @return A clusterProcessGroupStatusEntity
      */
@@ -809,12 +829,12 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getProcessGroupStatus(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The process group id",
+                    value = "The process group id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -842,7 +862,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the process group status history for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the process group
      * @return A clusterProcessGroupStatusHistoryEntity
      */
@@ -872,12 +894,12 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getProcessGroupStatusHistory(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The process group id",
+                    value = "The process group id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -904,7 +926,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the remote process group status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the remote process group
      * @return A clusterRemoteProcessGroupStatusEntity
      */
@@ -934,12 +958,12 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getRemoteProcessGroupStatus(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The remote process group id",
+                    value = "The remote process group id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -967,7 +991,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the input port status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the input port
      * @return A clusterPortStatusEntity
      */
@@ -997,12 +1023,12 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getInputPortStatus(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The input port id",
+                    value = "The input port id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -1030,7 +1056,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the output port status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the output port
      * @return A clusterPortStatusEntity
      */
@@ -1060,12 +1088,12 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getOutputPortStatus(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The output port id",
+                    value = "The output port id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -1093,7 +1121,9 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the remote process group status history for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the processor
      * @return A clusterRemoteProcessGroupStatusHistoryEntity
      */
@@ -1123,12 +1153,12 @@ public class ClusterResource extends ApplicationResource {
     )
     public Response getRemoteProcessGroupStatusHistory(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The remote process group id",
+                    value = "The remote process group id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -1153,7 +1183,6 @@ public class ClusterResource extends ApplicationResource {
     }
 
     // setters
-    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9bdc752a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
index 1e06fa4..503d6ba 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
@@ -114,12 +114,15 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Gets all the connections.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A connectionsEntity.
      */
     @GET
     @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ConnectionsEntity.class)
     @ApiOperation(
@@ -141,7 +144,7 @@ public class ConnectionResource extends ApplicationResource {
     )
     public Response getConnections(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -170,7 +173,9 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Retrieves the specified connection.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the connection.
      * @return A connectionEntity.
      */
@@ -200,12 +205,12 @@ public class ConnectionResource extends ApplicationResource {
     )
     public Response getConnection(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The connection id",
+                    value = "The connection id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -234,7 +239,9 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Retrieves the specified connection status history.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the connection to retrieve.
      * @return A statusHistoryEntity.
      */
@@ -264,12 +271,12 @@ public class ConnectionResource extends ApplicationResource {
     )
     public Response getConnectionStatusHistory(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
-            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The connection id",
+                    value = "The connection id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -299,8 +306,11 @@ public class ConnectionResource extends ApplicationResource {
      * Creates a connection.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param name The name of the connection.
      * @param sourceId The id of the source connectable.
      * @param sourceGroupId The parent group id for the source.
@@ -308,10 +318,15 @@ public class ConnectionResource extends ApplicationResource {
      * @param bends Array of bend points in string form ["x,y", "x,y", "x,y"]
      * @param relationships Array of relationships.
      * @param flowFileExpiration The flow file expiration in minutes
-     * @param backPressureObjectThreshold The object count for when to apply back pressure.
-     * @param backPressureDataSizeThreshold The object size for when to apply back pressure.
-     * @param prioritizers Array of prioritizer types. These types should refer to one of the types in the GET /controller/prioritizers response. If this parameter is not specified no change will be
-     * made. If this parameter appears with no value (empty string), it will be treated as an empty array.
+     * @param backPressureObjectThreshold The object count for when to apply
+     * back pressure.
+     * @param backPressureDataSizeThreshold The object size for when to apply
+     * back pressure.
+     * @param prioritizers Array of prioritizer types. These types should refer
+     * to one of the types in the GET /controller/prioritizers response. If this
+     * parameter is not specified no change will be made. If this parameter
+     * appears with no value (empty string), it will be treated as an empty
+     * array.
      * @param destinationId The id of the destination connectable.
      * @param destinationGroupId The parent group id for the destination.
      * @param destinationType The type of the destination connectable.
@@ -321,6 +336,7 @@ public class ConnectionResource extends ApplicationResource {
     @POST
     @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ConnectionEntity.class)
     public Response createConnection(
@@ -455,6 +471,7 @@ public class ConnectionResource extends ApplicationResource {
     @POST
     @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ConnectionEntity.class)
     @ApiOperation(
@@ -476,10 +493,9 @@ public class ConnectionResource extends ApplicationResource {
     public Response createConnection(
             @Context HttpServletRequest httpServletRequest,
             @ApiParam(
-                    value = "The connection configuration details",
+                    value = "The connection configuration details.",
                     required = true
-            )
-            ConnectionEntity connectionEntity) {
+            ) ConnectionEntity connectionEntity) {
 
         if (connectionEntity == null || connectionEntity.getConnection() == null) {
             throw new IllegalArgumentException("Connection details must be specified.");
@@ -557,8 +573,11 @@ public class ConnectionResource extends ApplicationResource {
      * Updates the specified relationship target.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param connectionId The id of the source processor.
      * @param name The name of the connection.
      * @param relationships Array of relationships.
@@ -566,10 +585,15 @@ public class ConnectionResource extends ApplicationResource {
      * @param labelIndex The control point index for the connection label
      * @param zIndex The zIndex for this connection
      * @param flowFileExpiration The flow file expiration in minutes
-     * @param backPressureObjectThreshold The object count for when to apply back pressure.
-     * @param backPressureDataSizeThreshold The object size for when to apply back pressure.
-     * @param prioritizers Array of prioritizer types. These types should refer to one of the types in the GET /controller/prioritizers response. If this parameter is not specified no change will be
-     * made. If this parameter appears with no value (empty string), it will be treated as an empty array.
+     * @param backPressureObjectThreshold The object count for when to apply
+     * back pressure.
+     * @param backPressureDataSizeThreshold The object size for when to apply
+     * back pressure.
+     * @param prioritizers Array of prioritizer types. These types should refer
+     * to one of the types in the GET /controller/prioritizers response. If this
+     * parameter is not specified no change will be made. If this parameter
+     * appears with no value (empty string), it will be treated as an empty
+     * array.
      * @param destinationId The id of the destination connectable.
      * @param destinationGroupId The group id of the destination.
      * @param destinationType The type of the destination type.
@@ -739,15 +763,14 @@ public class ConnectionResource extends ApplicationResource {
     public Response updateConnection(
             @Context HttpServletRequest httpServletRequest,
             @ApiParam(
-                    value = "The connection id",
+                    value = "The connection id.",
                     required = true
             )
             @PathParam("id") String id,
             @ApiParam(
-                    value = "The connection configuration details",
+                    value = "The connection configuration details.",
                     required = true
-            )
-            ConnectionEntity connectionEntity) {
+            ) ConnectionEntity connectionEntity) {
 
         if (connectionEntity == null || connectionEntity.getConnection() == null) {
             throw new IllegalArgumentException("Connection details must be specified.");
@@ -805,8 +828,11 @@ public class ConnectionResource extends ApplicationResource {
      * Removes the specified connection.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the connection.
      * @return An Entity containing the client id and an updated revision.
      */
@@ -835,17 +861,17 @@ public class ConnectionResource extends ApplicationResource {
     public Response deleteRelationshipTarget(
             @Context HttpServletRequest httpServletRequest,
             @ApiParam(
-                    value = "The revision is used to verify the client is working with the latest version of the flow",
+                    value = "The revision is used to verify the client is working with the latest version of the flow.",
                     required = false
             )
             @QueryParam(VERSION) LongParameter version,
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "The connection id",
+                    value = "The connection id.",
                     required = true
             )
             @PathParam("id") String id) {
@@ -885,7 +911,6 @@ public class ConnectionResource extends ApplicationResource {
     }
 
     // setters
-    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9bdc752a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
index 821ca2f..418d475 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
@@ -249,7 +249,8 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Returns a 200 OK response to indicate this is a valid controller endpoint.
+     * Returns a 200 OK response to indicate this is a valid controller
+     * endpoint.
      *
      * @return An OK response with an empty entity body.
      */
@@ -266,7 +267,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Returns the details of this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A controllerEntity.
      */
     @GET
@@ -289,7 +292,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getController(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -361,14 +364,22 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Creates a new archive of this flow controller. Note, this is a POST operation that returns a URI that is not representative of the thing that was actually created. The archive that is created
-     * cannot be referenced at a later time, therefore there is no corresponding URI. Instead the request URI is returned.
+     * Creates a new archive of this flow controller. Note, this is a POST
+     * operation that returns a URI that is not representative of the thing that
+     * was actually created. The archive that is created cannot be referenced at
+     * a later time, therefore there is no corresponding URI. Instead the
+     * request URI is returned.
      *
-     * Alternatively, we could have performed a PUT request. However, PUT requests are supposed to be idempotent and this endpoint is certainly not.
+     * Alternatively, we could have performed a PUT request. However, PUT
+     * requests are supposed to be idempotent and this endpoint is certainly
+     * not.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A processGroupEntity.
      */
     @POST
@@ -380,9 +391,9 @@ public class ControllerResource extends ApplicationResource {
     @ApiOperation(
             value = "Creates a new archive of this NiFi flow configuration",
             notes = "This POST operation returns a URI that is not representative of the thing "
-                    + "that was actually created. The archive that is created cannot be referenced "
-                    + "at a later time, therefore there is no corresponding URI. Instead the "
-                    + "request URI is returned.",
+            + "that was actually created. The archive that is created cannot be referenced "
+            + "at a later time, therefore there is no corresponding URI. Instead the "
+            + "request URI is returned.",
             response = ProcessGroupEntity.class,
             authorizations = {
                 @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
@@ -399,12 +410,12 @@ public class ControllerResource extends ApplicationResource {
     public Response createArchive(
             @Context HttpServletRequest httpServletRequest,
             @ApiParam(
-                    value = "The revision is used to verify the client is working with the latest version of the flow",
+                    value = "The revision is used to verify the client is working with the latest version of the flow.",
                     required = true
             )
             @FormParam(VERSION) LongParameter version,
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @FormParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -487,7 +498,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the status for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A controllerStatusEntity.
      */
     @GET
@@ -515,7 +528,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getControllerStatus(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -538,7 +551,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the counters report for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A countersEntity.
      */
     @GET
@@ -566,7 +581,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getCounters(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -590,7 +605,9 @@ public class ControllerResource extends ApplicationResource {
      * Update the specified counter. This will reset the counter value to 0.
      *
      * @param httpServletRequest request
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the counter.
      * @return A counterEntity.
      */
@@ -619,7 +636,7 @@ public class ControllerResource extends ApplicationResource {
     public Response updateCounter(
             @Context HttpServletRequest httpServletRequest,
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @FormParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
@@ -655,7 +672,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the configuration for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A controllerConfigurationEntity.
      */
     @GET
@@ -684,7 +703,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getControllerConfig(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -714,12 +733,17 @@ public class ControllerResource extends ApplicationResource {
      * Update the configuration for this NiFi.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param name The name of this controller.
      * @param comments The comments of this controller.
-     * @param maxTimerDrivenThreadCount The maximum number of timer driven threads this controller has available.
-     * @param maxEventDrivenThreadCount The maximum number of timer driven threads this controller has available.
+     * @param maxTimerDrivenThreadCount The maximum number of timer driven
+     * threads this controller has available.
+     * @param maxEventDrivenThreadCount The maximum number of timer driven
+     * threads this controller has available.
      * @return A controllerConfigurationEntity.
      */
     @PUT
@@ -798,7 +822,7 @@ public class ControllerResource extends ApplicationResource {
     public Response updateControllerConfig(
             @Context HttpServletRequest httpServletRequest,
             @ApiParam(
-                    value = "The controller configuration",
+                    value = "The controller configuration.",
                     required = true
             ) ControllerConfigurationEntity configEntity) {
 
@@ -847,9 +871,12 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Retrieves the user details, including the authorities, about the user making the request.
+     * Retrieves the user details, including the authorities, about the user
+     * making the request.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A authoritiesEntity.
      */
     @GET
@@ -877,7 +904,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getAuthorities(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -905,7 +932,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the banners for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A bannerEntity.
      */
     @GET
@@ -933,7 +962,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getBanners(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -966,7 +995,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of processors that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A processorTypesEntity.
      */
     @GET
@@ -994,7 +1025,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getProcessorTypes(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -1020,7 +1051,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of controller services that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param serviceType Returns only services that implement this type
      * @return A controllerServicesTypesEntity.
      */
@@ -1049,12 +1082,12 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getControllerServiceTypes(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @ApiParam(
-                    value = "If specified, will only return controller services of this type",
+                    value = "If specified, will only return controller services of this type.",
                     required = false
             )
             @QueryParam("serviceType") String serviceType) {
@@ -1080,7 +1113,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of reporting tasks that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A controllerServicesTypesEntity.
      */
     @GET
@@ -1108,7 +1143,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getReportingTaskTypes(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -1134,7 +1169,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of prioritizers that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A prioritizerTypesEntity.
      */
     @GET
@@ -1162,7 +1199,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getPrioritizers(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -1188,7 +1225,9 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves details about this NiFi to put in the About dialog.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return An aboutEntity.
      */
     @GET
@@ -1216,7 +1255,7 @@ public class ControllerResource extends ApplicationResource {
     )
     public Response getAboutInfo(
             @ApiParam(
-                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
                     required = false
             )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
@@ -1247,7 +1286,6 @@ public class ControllerResource extends ApplicationResource {
     }
 
     // setters
-    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }


[47/50] [abbrv] incubator-nifi git commit: NIFI-292: - Continuing to annotate endpoints using swagger.

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9bdc752a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
index f9bfda3..1581722 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
@@ -17,6 +17,11 @@
 package org.apache.nifi.web.api;
 
 import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiParam;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+import com.wordnik.swagger.annotations.Authorization;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -123,7 +128,8 @@ public class ControllerServiceResource extends ApplicationResource {
     }
 
     /**
-     * Parses the availability and ensure that the specified availability makes sense for the given NiFi instance.
+     * Parses the availability and ensure that the specified availability makes
+     * sense for the given NiFi instance.
      *
      * @param availability avail
      * @return avail
@@ -147,17 +153,50 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Retrieves all the of controller services in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @return A controllerServicesEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{availability}")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ControllerServicesEntity.class)
-    public Response getControllerServices(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("availability") String availability) {
+    @ApiOperation(
+            value = "Gets all controller services",
+            response = ControllerServicesEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getControllerServices(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "Whether the controller is available on the NCM or nodes. If the NiFi is standalone the availability should be NODE.",
+                    allowableValues = "NCM, NODE",
+                    required = true
+            )
+            @PathParam("availability") String availability) {
+
         final Availability avail = parseAvailability(availability);
 
         // replicate if cluster manager
@@ -185,10 +224,14 @@ public class ControllerServiceResource extends ApplicationResource {
      * Creates a new controller service.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @param type The type of controller service to create.
      * @return A controllerServiceEntity.
      */
@@ -228,8 +271,9 @@ public class ControllerServiceResource extends ApplicationResource {
      * Creates a new Controller Service.
      *
      * @param httpServletRequest request
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @param controllerServiceEntity A controllerServiceEntity.
      * @return A controllerServiceEntity.
      */
@@ -239,10 +283,33 @@ public class ControllerServiceResource extends ApplicationResource {
     @Path("/{availability}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ControllerServiceEntity.class)
+    @ApiOperation(
+            value = "Creates a new controller service",
+            response = ControllerServiceEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response createControllerService(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "Whether the controller is available on the NCM or nodes. If the NiFi is standalone the availability should be NODE.",
+                    allowableValues = "NCM, NODE",
+                    required = true
+            )
             @PathParam("availability") String availability,
-            ControllerServiceEntity controllerServiceEntity) {
+            @ApiParam(
+                    value = "The controller service configuration details.",
+                    required = true
+            ) ControllerServiceEntity controllerServiceEntity) {
 
         final Availability avail = parseAvailability(availability);
 
@@ -317,19 +384,56 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Retrieves the specified controller service.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @param id The id of the controller service to retrieve
      * @return A controllerServiceEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{availability}/{id}")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ControllerServiceEntity.class)
-    public Response getControllerService(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
-            @PathParam("availability") String availability, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets a controller service",
+            response = ControllerServiceEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getControllerService(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "Whether the controller is available on the NCM or nodes. If the NiFi is standalone the availability should be NODE.",
+                    allowableValues = "NCM, NODE",
+                    required = true
+            )
+            @PathParam("availability") String availability,
+            @ApiParam(
+                    value = "The controller service id.",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         final Availability avail = parseAvailability(availability);
 
@@ -356,20 +460,59 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Returns the descriptor for the specified property.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param availability avail
      * @param id The id of the controller service.
      * @param propertyName The property
      * @return a propertyDescriptorEntity
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{availability}/{id}/descriptors")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(PropertyDescriptorEntity.class)
+    @ApiOperation(
+            value = "Gets a controller service property descriptor",
+            response = PropertyDescriptorEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response getPropertyDescriptor(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
-            @PathParam("availability") String availability, @PathParam("id") String id,
+            @ApiParam(
+                    value = "Whether the controller is available on the NCM or nodes. If the NiFi is standalone the availability should be NODE.",
+                    allowableValues = "NCM, NODE",
+                    required = true
+            )
+            @PathParam("availability") String availability,
+            @ApiParam(
+                    value = "The controller service id.",
+                    required = true
+            )
+            @PathParam("id") String id,
+            @ApiParam(
+                    value = "The property name to return the descriptor for.",
+                    required = true
+            )
             @QueryParam("propertyName") String propertyName) {
 
         final Availability avail = parseAvailability(availability);
@@ -403,20 +546,56 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Retrieves the references of the specified controller service.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @param id The id of the controller service to retrieve
      * @return A controllerServiceEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{availability}/{id}/references")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ControllerServiceEntity.class)
+    @ApiOperation(
+            value = "Gets a controller service",
+            response = ControllerServiceEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response getControllerServiceReferences(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
-            @PathParam("availability") String availability, @PathParam("id") String id) {
+            @ApiParam(
+                    value = "Whether the controller is available on the NCM or nodes. If the NiFi is standalone the availability should be NODE.",
+                    allowableValues = "NCM, NODE",
+                    required = true
+            )
+            @PathParam("availability") String availability,
+            @ApiParam(
+                    value = "The controller service id.",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         final Availability avail = parseAvailability(availability);
 
@@ -444,26 +623,71 @@ public class ControllerServiceResource extends ApplicationResource {
      * Updates the references of the specified controller service.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @param id The id of the controller service to retrieve
-     * @param state Sets the state of referencing components. A value of RUNNING or STOPPED will update referencing schedulable components (Processors and Reporting Tasks). A value of ENABLED or
-     * DISABLED will update referencing controller services.
+     * @param state Sets the state of referencing components. A value of RUNNING
+     * or STOPPED will update referencing schedulable components (Processors and
+     * Reporting Tasks). A value of ENABLED or DISABLED will update referencing
+     * controller services.
      * @return A controllerServiceEntity.
      */
     @PUT
     @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{availability}/{id}/references")
-    @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
+    @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ControllerServiceEntity.class)
+    @ApiOperation(
+            value = "Updates a controller services references",
+            response = ControllerServiceEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response updateControllerServiceReferences(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The revision is used to verify the client is working with the latest version of the flow.",
+                    required = false
+            )
             @FormParam(VERSION) LongParameter version,
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
             @FormParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
-            @PathParam("availability") String availability, @PathParam("id") String id,
+            @ApiParam(
+                    value = "Whether the controller is available on the NCM or nodes. If the NiFi is standalone the availability should be NODE.",
+                    allowableValues = "NCM, NODE",
+                    required = true
+            )
+            @PathParam("availability") String availability,
+            @ApiParam(
+                    value = "The controller service id.",
+                    required = true
+            )
+            @PathParam("id") String id,
+            @ApiParam(
+                    value = "The new state of the references for the controller service.",
+                    allowableValues = "ENABLED, DISABLED, RUNNING, STOPPED",
+                    required = true
+            )
             @FormParam("state") @DefaultValue(StringUtils.EMPTY) String state) {
 
         // parse the state to determine the desired action
@@ -538,18 +762,26 @@ public class ControllerServiceResource extends ApplicationResource {
      * Updates the specified controller service.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @param id The id of the controller service to update.
      * @param name The name of the controller service
      * @param annotationData The annotation data for the controller service
      * @param comments The comments for the controller service
-     * @param state The state of this controller service. Should be ENABLED or DISABLED.
-     * @param markedForDeletion Array of property names whose value should be removed.
-     * @param formParams Additionally, the processor properties and styles are specified in the form parameters. Because the property names and styles differ from processor to processor they are
-     * specified in a map-like fashion:
+     * @param state The state of this controller service. Should be ENABLED or
+     * DISABLED.
+     * @param markedForDeletion Array of property names whose value should be
+     * removed.
+     * @param formParams Additionally, the processor properties and styles are
+     * specified in the form parameters. Because the property names and styles
+     * differ from processor to processor they are specified in a map-like
+     * fashion:
      * <br>
      * <ul>
      * <li>properties[required.file.path]=/path/to/file</li>
@@ -634,8 +866,9 @@ public class ControllerServiceResource extends ApplicationResource {
      * Updates the specified a new Controller Service.
      *
      * @param httpServletRequest request
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @param id The id of the controller service to update.
      * @param controllerServiceEntity A controllerServiceEntity.
      * @return A controllerServiceEntity.
@@ -646,11 +879,39 @@ public class ControllerServiceResource extends ApplicationResource {
     @Path("/{availability}/{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ControllerServiceEntity.class)
+    @ApiOperation(
+            value = "Updates a controller service",
+            response = ControllerServiceEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response updateControllerService(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "Whether the controller is available on the NCM or nodes. If the NiFi is standalone the availability should be NODE.",
+                    allowableValues = "NCM, NODE",
+                    required = true
+            )
             @PathParam("availability") String availability,
+            @ApiParam(
+                    value = "The controller service id.",
+                    required = true
+            )
             @PathParam("id") String id,
-            ControllerServiceEntity controllerServiceEntity) {
+            @ApiParam(
+                    value = "The controller service configuration details.",
+                    required = true
+            ) ControllerServiceEntity controllerServiceEntity) {
 
         final Availability avail = parseAvailability(availability);
 
@@ -711,23 +972,62 @@ public class ControllerServiceResource extends ApplicationResource {
      * Removes the specified controller service.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
-     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
-     * availability.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
+     * @param availability Whether the controller service is available on the
+     * NCM only (ncm) or on the nodes only (node). If this instance is not
+     * clustered all services should use the node availability.
      * @param id The id of the controller service to remove.
      * @return A entity containing the client id and an updated revision.
      */
     @DELETE
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{availability}/{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ControllerServiceEntity.class)
+    @ApiOperation(
+            value = "Deletes a controller service",
+            response = ControllerServiceEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response removeControllerService(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The revision is used to verify the client is working with the latest version of the flow.",
+                    required = false
+            )
             @QueryParam(VERSION) LongParameter version,
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
-            @PathParam("availability") String availability, @PathParam("id") String id) {
+            @ApiParam(
+                    value = "Whether the controller is available on the NCM or nodes. If the NiFi is standalone the availability should be NODE.",
+                    allowableValues = "NCM, NODE",
+                    required = true
+            )
+            @PathParam("availability") String availability,
+            @ApiParam(
+                    value = "The controller service id.",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         final Availability avail = parseAvailability(availability);
 

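[Editor's note: the hunks above call parseAvailability(...), but only its Javadoc appears in this diff. A rough sketch of what such a check could look like, given the allowableValues of "NCM, NODE" documented on the @ApiParam annotations, follows; the enum and the exception choice are assumptions, not the committed implementation.]

    import java.util.Locale;

    // Hypothetical helper; the actual NiFi implementation is not shown in this diff.
    final class AvailabilitySketch {

        // Mirrors the allowableValues = "NCM, NODE" documented on the path parameter.
        enum Availability { NCM, NODE }

        static Availability parseAvailability(final String availability) {
            if (availability == null) {
                throw new IllegalArgumentException("Availability must be specified as NCM or NODE.");
            }
            try {
                // A standalone NiFi would be expected to use NODE; anything else is rejected.
                return Availability.valueOf(availability.toUpperCase(Locale.ROOT));
            } catch (final IllegalArgumentException e) {
                throw new IllegalArgumentException("Availability must be NCM or NODE but was: " + availability);
            }
        }
    }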
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9bdc752a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
index 73742e7..e71218c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
@@ -17,6 +17,11 @@
 package org.apache.nifi.web.api;
 
 import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiParam;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+import com.wordnik.swagger.annotations.Authorization;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -97,14 +102,40 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Retrieves all of the funnels in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @return A funnelsEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(FunnelsEntity.class)
-    public Response getFunnels(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Gets all funnels",
+            response = FunnelsEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getFunnels(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -131,8 +162,11 @@ public class FunnelResource extends ApplicationResource {
      * Creates a new funnel.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param x The x coordinate for this funnel's position.
      * @param y The y coordinate for this funnel's position.
      * @return A funnelEntity.
@@ -140,6 +174,7 @@ public class FunnelResource extends ApplicationResource {
     @POST
     @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(FunnelEntity.class)
     public Response createFunnel(
@@ -183,11 +218,30 @@ public class FunnelResource extends ApplicationResource {
     @POST
     @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(FunnelEntity.class)
+    @ApiOperation(
+            value = "Creates a funnel",
+            response = FunnelEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response createFunnel(
             @Context HttpServletRequest httpServletRequest,
-            FunnelEntity funnelEntity) {
+            @ApiParam(
+                    value = "The funnel configuration details.",
+                    required = true
+            ) FunnelEntity funnelEntity) {
 
         if (funnelEntity == null || funnelEntity.getFunnel() == null) {
             throw new IllegalArgumentException("Funnel details must be specified.");
@@ -256,16 +310,47 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Retrieves the specified funnel.
      *
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the funnel to retrieve
      * @return A funnelEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("{id}")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(FunnelEntity.class)
-    public Response getFunnel(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets a funnel",
+            response = FunnelEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getFunnel(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "The funnel id.",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -291,8 +376,11 @@ public class FunnelResource extends ApplicationResource {
      * Updates the specified funnel.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the funnel to update.
      * @param parentGroupId The id of the process group to move this funnel to.
      * @param x The x coordinate for this funnel's position.
@@ -353,10 +441,33 @@ public class FunnelResource extends ApplicationResource {
     @Path("{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(FunnelEntity.class)
+    @ApiOperation(
+            value = "Updates a funnel",
+            response = FunnelEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response updateFunnel(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The funnel id.",
+                    required = true
+            )
             @PathParam("id") String id,
-            FunnelEntity funnelEntity) {
+            @ApiParam(
+                    value = "The funnel configuration details.",
+                    required = true
+            ) FunnelEntity funnelEntity) {
 
         if (funnelEntity == null || funnelEntity.getFunnel() == null) {
             throw new IllegalArgumentException("Funnel details must be specified.");
@@ -415,20 +526,52 @@ public class FunnelResource extends ApplicationResource {
      * Removes the specified funnel.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param version The revision is used to verify the client is working with
+     * the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a
+     * new one will be generated. This value (whether specified or generated) is
+     * included in the response.
      * @param id The id of the funnel to remove.
      * @return An entity containing the client id and an updated revision.
      */
     @DELETE
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(FunnelEntity.class)
+    @ApiOperation(
+            value = "Deletes a funnel",
+            response = FunnelEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response removeFunnel(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The revision is used to verify the client is working with the latest version of the flow.",
+                    required = false
+            )
             @QueryParam(VERSION) LongParameter version,
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "The funnel id.",
+                    required = true
+            )
             @PathParam("id") String id) {
 
         // replicate if cluster manager

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9bdc752a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
index 4c6b313..ec5a83c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
@@ -17,6 +17,11 @@
 package org.apache.nifi.web.api;
 
 import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiParam;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+import com.wordnik.swagger.annotations.Authorization;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -103,10 +108,34 @@ public class InputPortResource extends ApplicationResource {
      * @return An inputPortsEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(InputPortsEntity.class)
-    public Response getInputPorts(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Gets all input ports",
+            response = InputPortsEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getInputPorts(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -143,6 +172,7 @@ public class InputPortResource extends ApplicationResource {
     @POST
     @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(InputPortEntity.class)
     public Response createInputPort(
@@ -189,11 +219,30 @@ public class InputPortResource extends ApplicationResource {
     @POST
     @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(InputPortEntity.class)
+    @ApiOperation(
+            value = "Creates an input port",
+            response = InputPortEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response createInputPort(
             @Context HttpServletRequest httpServletRequest,
-            InputPortEntity portEntity) {
+            @ApiParam(
+                    value = "The input port configuration details.",
+                    required = true
+            ) InputPortEntity portEntity) {
 
         if (portEntity == null || portEntity.getInputPort() == null) {
             throw new IllegalArgumentException("Port details must be specified.");
@@ -268,11 +317,40 @@ public class InputPortResource extends ApplicationResource {
      * @return An inputPortEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("{id}")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(InputPortEntity.class)
-    public Response getInputPort(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets an input port",
+            response = InputPortEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getInputPort(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "The input port id.",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -389,10 +467,33 @@ public class InputPortResource extends ApplicationResource {
     @Path("{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(InputPortEntity.class)
+    @ApiOperation(
+            value = "Updates an input port",
+            response = InputPortEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response updateInputPort(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The input port id.",
+                    required = true
+            )
             @PathParam("id") String id,
-            InputPortEntity portEntity) {
+            @ApiParam(
+                    value = "The input port configuration details.",
+                    required = true
+            ) InputPortEntity portEntity) {
 
         if (portEntity == null || portEntity.getInputPort() == null) {
             throw new IllegalArgumentException("Input port details must be specified.");
@@ -458,14 +559,43 @@ public class InputPortResource extends ApplicationResource {
      * @return An inputPortEntity.
      */
     @DELETE
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(InputPortEntity.class)
+    @ApiOperation(
+            value = "Deletes an input port",
+            response = InputPortEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response removeInputPort(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The revision is used to verify the client is working with the latest version of the flow.",
+                    required = false
+            )
             @QueryParam(VERSION) LongParameter version,
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.",
+                    required = false
+            )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "The input port id.",
+                    required = true
+            )
             @PathParam("id") String id) {
 
         // replicate if cluster manager


[20/50] [abbrv] incubator-nifi git commit: NIFI-527: Code cleanup

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocReader.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocReader.java
index 8944cec..7c13a2a 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocReader.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocReader.java
@@ -24,9 +24,9 @@ import java.io.IOException;
 
 /**
  * Standard implementation of TocReader.
- * 
+ *
  * Expects the .toc file to be in the following format:
- * 
+ *
  * byte 0: version
  * byte 1: boolean: compressionFlag -> 0 = journal is NOT compressed, 1 = journal is compressed
  * byte 2-9: long: offset of block 0
@@ -37,21 +37,21 @@ import java.io.IOException;
 public class StandardTocReader implements TocReader {
     private final boolean compressed;
     private final long[] offsets;
-    
+
     public StandardTocReader(final File file) throws IOException {
         try (final FileInputStream fis = new FileInputStream(file);
-             final DataInputStream dis = new DataInputStream(fis)) {
-            
+                final DataInputStream dis = new DataInputStream(fis)) {
+
             final int version = dis.read();
             if ( version < 0 ) {
                 throw new EOFException();
             }
-            
+
             final int compressionFlag = dis.read();
             if ( compressionFlag < 0 ) {
                 throw new EOFException();
             }
-            
+
             if ( compressionFlag == 0 ) {
                 compressed = false;
             } else if ( compressionFlag == 1 ) {
@@ -59,21 +59,21 @@ public class StandardTocReader implements TocReader {
             } else {
                 throw new IOException("Table of Contents appears to be corrupt: could not read 'compression flag' from header; expected value of 0 or 1 but got " + compressionFlag);
             }
-            
+
             final int numBlocks = (int) ((file.length() - 2) / 8);
             offsets = new long[numBlocks];
-            
+
             for (int i=0; i < numBlocks; i++) {
                 offsets[i] = dis.readLong();
             }
         }
     }
-    
+
     @Override
     public boolean isCompressed() {
         return compressed;
     }
-    
+
     @Override
     public long getBlockOffset(final int blockIndex) {
         if ( blockIndex >= offsets.length ) {
@@ -89,20 +89,20 @@ public class StandardTocReader implements TocReader {
         }
         return offsets[offsets.length - 1];
     }
-    
+
     @Override
     public void close() throws IOException {
     }
 
-	@Override
-	public int getBlockIndex(final long blockOffset) {
-		for (int i=0; i < offsets.length; i++) {
-			if ( offsets[i] > blockOffset ) {
-				return i-1;
-			}
-		}
-		
-		return offsets.length - 1;
-	}
+    @Override
+    public int getBlockIndex(final long blockOffset) {
+        for (int i=0; i < offsets.length; i++) {
+            if ( offsets[i] > blockOffset ) {
+                return i-1;
+            }
+        }
+
+        return offsets.length - 1;
+    }
 
 }
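
As a side note, the .toc layout described in the class comment above (one version byte, one compression-flag byte, then an 8-byte offset per block) can be exercised with a small, self-contained sketch. This is illustrative only and uses plain java.io streams rather than NiFi classes.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class TocLayoutSketch {
    public static void main(final String[] args) throws IOException {
        // Write a tiny table of contents: version 1, compressed, two block offsets.
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (DataOutputStream dos = new DataOutputStream(baos)) {
            dos.writeByte(1);       // byte 0: version
            dos.writeByte(1);       // byte 1: compression flag (1 = journal is compressed)
            dos.writeLong(0L);      // bytes 2-9: offset of block 0
            dos.writeLong(4096L);   // bytes 10-17: offset of block 1
        }
        final byte[] toc = baos.toByteArray();

        // Read it back the same way StandardTocReader does: two header bytes,
        // then (length - 2) / 8 block offsets.
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(toc))) {
            final int version = dis.read();
            final boolean compressed = dis.read() == 1;
            final int numBlocks = (toc.length - 2) / 8;
            final long[] offsets = new long[numBlocks];
            for (int i = 0; i < numBlocks; i++) {
                offsets[i] = dis.readLong();
            }
            System.out.println("version=" + version + ", compressed=" + compressed
                    + ", block 1 starts at byte " + offsets[1]);
        }
    }
}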

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocWriter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocWriter.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocWriter.java
index 488f225..10de459 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocWriter.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/StandardTocWriter.java
@@ -19,7 +19,6 @@ package org.apache.nifi.provenance.toc;
 import java.io.BufferedOutputStream;
 import java.io.DataOutputStream;
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.file.Files;
@@ -29,7 +28,7 @@ import org.slf4j.LoggerFactory;
 
 /**
  * Standard implementation of {@link TocWriter}.
- * 
+ *
  * Format of .toc file:
  * byte 0: version
  * byte 1: compressed: 0 -> not compressed, 1 -> compressed
@@ -39,27 +38,27 @@ import org.slf4j.LoggerFactory;
  * byte (N*8+2)-(N*8+9): long: offset of block N
  */
 public class StandardTocWriter implements TocWriter {
-	private static final Logger logger = LoggerFactory.getLogger(StandardTocWriter.class);
-	
+    private static final Logger logger = LoggerFactory.getLogger(StandardTocWriter.class);
+
     public static final byte VERSION = 1;
-    
+
     private final File file;
     private final FileOutputStream fos;
     private final boolean alwaysSync;
     private int index = -1;
-    
+
     /**
      * Creates a StandardTocWriter that writes to the given file.
      * @param file the file to write to
      * @param compressionFlag whether or not the journal is compressed
-     * @throws FileNotFoundException 
+     * @throws IOException if unable to write header info to the specified file
      */
     public StandardTocWriter(final File file, final boolean compressionFlag, final boolean alwaysSync) throws IOException {
         final File tocDir = file.getParentFile();
         if ( !tocDir.exists() ) {
-        	Files.createDirectories(tocDir.toPath());
+            Files.createDirectories(tocDir.toPath());
         }
-        
+
         this.file = file;
         fos = new FileOutputStream(file);
         this.alwaysSync = alwaysSync;
@@ -69,12 +68,12 @@ public class StandardTocWriter implements TocWriter {
         header[1] = (byte) (compressionFlag ? 1 : 0);
         fos.write(header);
         fos.flush();
-        
+
         if ( alwaysSync ) {
             sync();
         }
     }
-    
+
     @Override
     public void addBlockOffset(final long offset) throws IOException {
         final BufferedOutputStream bos = new BufferedOutputStream(fos);
@@ -83,17 +82,17 @@ public class StandardTocWriter implements TocWriter {
         dos.flush();
         index++;
         logger.debug("Adding block {} at offset {}", index, offset);
-        
+
         if ( alwaysSync ) {
             sync();
         }
     }
-    
+
     @Override
     public void sync() throws IOException {
-    	fos.getFD().sync();
+        fos.getFD().sync();
     }
-    
+
     @Override
     public int getCurrentBlockIndex() {
         return index;
@@ -104,15 +103,15 @@ public class StandardTocWriter implements TocWriter {
         if (alwaysSync) {
             fos.getFD().sync();
         }
-        
+
         fos.close();
     }
-    
+
     @Override
     public File getFile() {
         return file;
     }
-    
+
     @Override
     public String toString() {
         return "TOC Writer for " + file;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocReader.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocReader.java
index 7c197be..97e2838 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocReader.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocReader.java
@@ -32,27 +32,31 @@ public interface TocReader extends Closeable {
 
     /**
      * Indicates whether or not the corresponding Journal file is compressed
-     * @return
+     * @return <code>true</code> if the event file is compressed
      */
     boolean isCompressed();
 
     /**
      * Returns the byte offset into the Journal File for the Block with the given index.
-     * @param blockIndex
-     * @return
+     *
+     * @param blockIndex the block index to get the byte offset for
+     * @return the byte offset for the given block index, or <code>-1</code> if the given block index
+     * does not exist
      */
     long getBlockOffset(int blockIndex);
-    
+
     /**
      * Returns the byte offset into the Journal File of the last Block in the given index
-     * @return
+     * @return the byte offset into the Journal File of the last Block in the given index
      */
     long getLastBlockOffset();
-    
+
     /**
      * Returns the index of the block that contains the given offset
-     * @param blockOffset
-     * @return
+     *
+     * @param blockOffset the byte offset for which the block index is desired
+     *
+     * @return the index of the block that contains the given offset
      */
     int getBlockIndex(long blockOffset);
 }
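
A short, hypothetical usage sketch of the TocReader contract documented above: given a byte offset into a journal, locate the block that contains it and the offset at which that block starts. The file name is made up for the example; the constructor and method signatures match the classes shown earlier in this commit.

import java.io.File;
import java.io.IOException;

import org.apache.nifi.provenance.toc.StandardTocReader;
import org.apache.nifi.provenance.toc.TocReader;

public class TocLookupSketch {
    public static void main(final String[] args) throws IOException {
        // TocReader extends Closeable, so try-with-resources closes it for us.
        try (TocReader toc = new StandardTocReader(new File("example-journal.toc"))) {
            final long journalOffset = 12345L;                    // example byte offset
            final int blockIndex = toc.getBlockIndex(journalOffset);
            final long blockStart = toc.getBlockOffset(blockIndex);
            System.out.println("compressed=" + toc.isCompressed()
                    + ", block " + blockIndex + " begins at byte " + blockStart);
        }
    }
}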

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocUtil.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocUtil.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocUtil.java
index c30ac98..3fa7d67 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocUtil.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocUtil.java
@@ -22,16 +22,19 @@ import org.apache.nifi.provenance.lucene.LuceneUtil;
 
 public class TocUtil {
 
-	/**
-	 * Returns the file that should be used as the Table of Contents for the given Journal File
-	 * @param journalFile
-	 * @return
-	 */
-	public static File getTocFile(final File journalFile) {
-    	final File tocDir = new File(journalFile.getParentFile(), "toc");
-    	final String basename = LuceneUtil.substringBefore(journalFile.getName(), ".");
-    	final File tocFile = new File(tocDir, basename + ".toc");
-    	return tocFile;
-	}
-	
+    /**
+     * Returns the file that should be used as the Table of Contents for the given Journal File.
+     * Note, if no TOC exists for the given Journal File, a File will still be returned but the file
+     * will not actually exist.
+     *
+     * @param journalFile the journal file for which to get the Table of Contents
+     * @return the file that represents the Table of Contents for the specified journal file.
+     */
+    public static File getTocFile(final File journalFile) {
+        final File tocDir = new File(journalFile.getParentFile(), "toc");
+        final String basename = LuceneUtil.substringBefore(journalFile.getName(), ".");
+        final File tocFile = new File(tocDir, basename + ".toc");
+        return tocFile;
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocWriter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocWriter.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocWriter.java
index c678053..38f910f 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocWriter.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/toc/TocWriter.java
@@ -27,26 +27,24 @@ public interface TocWriter extends Closeable {
 
     /**
      * Adds the given block offset as the next Block Offset in the Table of Contents
-     * @param offset
-     * @throws IOException
+     * @param offset the byte offset at which the block begins
+     * @throws IOException if unable to persist the block index
      */
     void addBlockOffset(long offset) throws IOException;
-    
+
     /**
-     * Returns the index of the current Block
-     * @return
+     * @return the index of the current Block
      */
     int getCurrentBlockIndex();
-    
+
     /**
-     * Returns the file that is currently being written to
-     * @return
+     * @return the file that is currently being written to
      */
     File getFile();
 
     /**
      * Synchronizes the data with the underlying storage device
-     * @throws IOException
+     * @throws IOException if unable to synchronize the data with the underlying storage device
      */
     void sync() throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java
index 5541ab5..7d97bcd 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java
@@ -75,7 +75,7 @@ public class TestPersistentProvenanceRepository {
 
     private PersistentProvenanceRepository repo;
     private RepositoryConfiguration config;
-    
+
     public static final int DEFAULT_ROLLOVER_MILLIS = 2000;
 
     private RepositoryConfiguration createConfiguration() {
@@ -89,9 +89,9 @@ public class TestPersistentProvenanceRepository {
 
     @BeforeClass
     public static void setLogLevel() {
-    	System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.provenance", "DEBUG");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.provenance", "DEBUG");
     }
-    
+
     @Before
     public void printTestName() {
         System.out.println("\n\n\n***********************  " + name.getMethodName() + "  *****************************");
@@ -105,33 +105,33 @@ public class TestPersistentProvenanceRepository {
             } catch (final IOException ioe) {
             }
         }
-        
+
         // Delete all of the storage files. We do this in order to clean up the tons of files that
         // we create but also to ensure that we have closed all of the file handles. If we leave any
         // streams open, for instance, this will throw an IOException, causing our unit test to fail.
         for ( final File storageDir : config.getStorageDirectories() ) {
-        	int i;
-        	for (i=0; i < 3; i++) {
-        		try {
-        			FileUtils.deleteFile(storageDir, true);
-        			break;
-	        	} catch (final IOException ioe) {
-	        		// if there is a virus scanner, etc. running in the background we may not be able to
-	        		// delete the file. Wait a sec and try again.
-	        		if ( i == 2 ) {
-	        			throw ioe;
-	        		} else {
-	        			try {
-	        				Thread.sleep(1000L);
-	        			} catch (final InterruptedException ie) {
-	        			}
-	        		}
-	        	}
-	        }
+            int i;
+            for (i=0; i < 3; i++) {
+                try {
+                    FileUtils.deleteFile(storageDir, true);
+                    break;
+                } catch (final IOException ioe) {
+                    // if there is a virus scanner, etc. running in the background we may not be able to
+                    // delete the file. Wait a sec and try again.
+                    if ( i == 2 ) {
+                        throw ioe;
+                    } else {
+                        try {
+                            Thread.sleep(1000L);
+                        } catch (final InterruptedException ie) {
+                        }
+                    }
+                }
+            }
         }
     }
 
-    
+
 
     private EventReporter getEventReporter() {
         return new EventReporter() {
@@ -241,7 +241,7 @@ public class TestPersistentProvenanceRepository {
         }
 
         Thread.sleep(1000L);
-        
+
         repo.close();
         Thread.sleep(500L); // Give the repo time to shutdown (i.e., close all file handles, etc.)
 
@@ -431,7 +431,7 @@ public class TestPersistentProvenanceRepository {
         repo.waitForRollover();
 
         final Query query = new Query(UUID.randomUUID().toString());
-//        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.FlowFileUUID, "00000000-0000-0000-0000*"));
+        //        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.FlowFileUUID, "00000000-0000-0000-0000*"));
         query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
         query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
         query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
@@ -905,14 +905,14 @@ public class TestPersistentProvenanceRepository {
         secondRepo.initialize(getEventReporter());
 
         try {
-	        final ProvenanceEventRecord event11 = builder.build();
-	        secondRepo.registerEvent(event11);
-	        secondRepo.waitForRollover();
-	        final ProvenanceEventRecord event11Retrieved = secondRepo.getEvent(10L);
-	        assertNotNull(event11Retrieved);
-	        assertEquals(10, event11Retrieved.getEventId());
+            final ProvenanceEventRecord event11 = builder.build();
+            secondRepo.registerEvent(event11);
+            secondRepo.waitForRollover();
+            final ProvenanceEventRecord event11Retrieved = secondRepo.getEvent(10L);
+            assertNotNull(event11Retrieved);
+            assertEquals(10, event11Retrieved.getEventId());
         } finally {
-        	secondRepo.close();
+            secondRepo.close();
         }
     }
 
@@ -983,26 +983,26 @@ public class TestPersistentProvenanceRepository {
         storageDirFiles = config.getStorageDirectories().get(0).listFiles(indexFileFilter);
         assertEquals(0, storageDirFiles.length);
     }
-    
-    
+
+
     @Test
     public void testBackPressure() throws IOException, InterruptedException {
         final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileCapacity(1L);	// force rollover on each record.
+        config.setMaxEventFileCapacity(1L);  // force rollover on each record.
         config.setJournalCount(1);
-        
+
         final AtomicInteger journalCountRef = new AtomicInteger(0);
-        
-    	repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
-    		@Override
-    		protected int getJournalCount() {
-    			return journalCountRef.get();
-    		}
-    	};
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
+            @Override
+            protected int getJournalCount() {
+                return journalCountRef.get();
+            }
+        };
         repo.initialize(getEventReporter());
 
-    	final Map<String, String> attributes = new HashMap<>();
-    	final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        final Map<String, String> attributes = new HashMap<>();
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
         builder.setEventTime(System.currentTimeMillis());
         builder.setEventType(ProvenanceEventType.RECEIVE);
         builder.setTransitUri("nifi://unit-test");
@@ -1023,31 +1023,31 @@ public class TestPersistentProvenanceRepository {
 
         final AtomicLong threadNanos = new AtomicLong(0L);
         final Thread t = new Thread(new Runnable() {
-			@Override
-			public void run() {
-				final long start = System.nanoTime();
-		        builder.fromFlowFile(createFlowFile(13, 3000L, attributes));
-		        attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + 13);
-		        repo.registerEvent(builder.build());
-		        threadNanos.set(System.nanoTime() - start);
-			}
+            @Override
+            public void run() {
+                final long start = System.nanoTime();
+                builder.fromFlowFile(createFlowFile(13, 3000L, attributes));
+                attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + 13);
+                repo.registerEvent(builder.build());
+                threadNanos.set(System.nanoTime() - start);
+            }
         });
         t.start();
 
         Thread.sleep(1500L);
-        
+
         journalCountRef.set(1);
         t.join();
-        
+
         final int threadMillis = (int) TimeUnit.NANOSECONDS.toMillis(threadNanos.get());
-        assertTrue(threadMillis > 1200);	// use 1200 to account for the fact that the timing is not exact
-        
+        assertTrue(threadMillis > 1200); // use 1200 to account for the fact that the timing is not exact
+
         builder.fromFlowFile(createFlowFile(15, 3000L, attributes));
         attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + 15);
         repo.registerEvent(builder.build());
     }
-    
-    
+
+
     // TODO: test EOF on merge
     // TODO: Test journal with no records
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestStandardRecordReaderWriter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestStandardRecordReaderWriter.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestStandardRecordReaderWriter.java
index 6f85b94..136f244 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestStandardRecordReaderWriter.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestStandardRecordReaderWriter.java
@@ -40,15 +40,15 @@ import org.junit.Test;
 public class TestStandardRecordReaderWriter {
     @BeforeClass
     public static void setLogLevel() {
-    	System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.provenance", "DEBUG");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.provenance", "DEBUG");
     }
 
-	private ProvenanceEventRecord createEvent() {
-		final Map<String, String> attributes = new HashMap<>();
-		attributes.put("filename", "1.txt");
+    private ProvenanceEventRecord createEvent() {
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("filename", "1.txt");
         attributes.put("uuid", UUID.randomUUID().toString());
 
-		final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
         builder.setEventTime(System.currentTimeMillis());
         builder.setEventType(ProvenanceEventType.RECEIVE);
         builder.setTransitUri("nifi://unit-test");
@@ -58,132 +58,132 @@ public class TestStandardRecordReaderWriter {
         final ProvenanceEventRecord record = builder.build();
 
         return record;
-	}
-	
-	@Test
-	public void testSimpleWriteWithToc() throws IOException {
+    }
+
+    @Test
+    public void testSimpleWriteWithToc() throws IOException {
         final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite");
         final File tocFile = TocUtil.getTocFile(journalFile);
         final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
         final StandardRecordWriter writer = new StandardRecordWriter(journalFile, tocWriter, false, 1024 * 1024);
-        
+
         writer.writeHeader();
         writer.writeRecord(createEvent(), 1L);
         writer.close();
 
         final TocReader tocReader = new StandardTocReader(tocFile);
-        
+
         try (final FileInputStream fis = new FileInputStream(journalFile);
-        	final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
-        	assertEquals(0, reader.getBlockIndex());
-        	reader.skipToBlock(0);
-        	StandardProvenanceEventRecord recovered = reader.nextRecord();
-        	assertNotNull(recovered);
-        	
-        	assertEquals("nifi://unit-test", recovered.getTransitUri());
-        	assertNull(reader.nextRecord());
+                final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
+            assertEquals(0, reader.getBlockIndex());
+            reader.skipToBlock(0);
+            StandardProvenanceEventRecord recovered = reader.nextRecord();
+            assertNotNull(recovered);
+
+            assertEquals("nifi://unit-test", recovered.getTransitUri());
+            assertNull(reader.nextRecord());
         }
-        
+
         FileUtils.deleteFile(journalFile.getParentFile(), true);
-	}
-	
-	
-	@Test
-	public void testSingleRecordCompressed() throws IOException {
+    }
+
+
+    @Test
+    public void testSingleRecordCompressed() throws IOException {
         final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
         final File tocFile = TocUtil.getTocFile(journalFile);
         final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
         final StandardRecordWriter writer = new StandardRecordWriter(journalFile, tocWriter, true, 100);
-        
+
         writer.writeHeader();
         writer.writeRecord(createEvent(), 1L);
         writer.close();
 
         final TocReader tocReader = new StandardTocReader(tocFile);
-        
+
         try (final FileInputStream fis = new FileInputStream(journalFile);
-        	final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
-        	assertEquals(0, reader.getBlockIndex());
-        	reader.skipToBlock(0);
-        	StandardProvenanceEventRecord recovered = reader.nextRecord();
-        	assertNotNull(recovered);
-        	
-        	assertEquals("nifi://unit-test", recovered.getTransitUri());
-        	assertNull(reader.nextRecord());
+                final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
+            assertEquals(0, reader.getBlockIndex());
+            reader.skipToBlock(0);
+            StandardProvenanceEventRecord recovered = reader.nextRecord();
+            assertNotNull(recovered);
+
+            assertEquals("nifi://unit-test", recovered.getTransitUri());
+            assertNull(reader.nextRecord());
         }
-        
+
         FileUtils.deleteFile(journalFile.getParentFile(), true);
-	}
-	
-	
-	@Test
-	public void testMultipleRecordsSameBlockCompressed() throws IOException {
+    }
+
+
+    @Test
+    public void testMultipleRecordsSameBlockCompressed() throws IOException {
         final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
         final File tocFile = TocUtil.getTocFile(journalFile);
         final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
         // new record each 1 MB of uncompressed data
         final StandardRecordWriter writer = new StandardRecordWriter(journalFile, tocWriter, true, 1024 * 1024);
-        
+
         writer.writeHeader();
         for (int i=0; i < 10; i++) {
-        	writer.writeRecord(createEvent(), i);
+            writer.writeRecord(createEvent(), i);
         }
         writer.close();
 
         final TocReader tocReader = new StandardTocReader(tocFile);
-        
+
         try (final FileInputStream fis = new FileInputStream(journalFile);
-        	final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
-        	for (int i=0; i < 10; i++) {
-	        	assertEquals(0, reader.getBlockIndex());
-	        	
-	        	// call skipToBlock half the time to ensure that we can; avoid calling it
-	        	// the other half of the time to ensure that it's okay.
-	        	if (i <= 5) {
-	        		reader.skipToBlock(0);
-	        	}
-	        	
-	        	StandardProvenanceEventRecord recovered = reader.nextRecord();
-	        	assertNotNull(recovered);
-	        	assertEquals("nifi://unit-test", recovered.getTransitUri());
-        	}
-        	
-        	assertNull(reader.nextRecord());
+                final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
+            for (int i=0; i < 10; i++) {
+                assertEquals(0, reader.getBlockIndex());
+
+                // call skipToBlock half the time to ensure that we can; avoid calling it
+                // the other half of the time to ensure that it's okay.
+                if (i <= 5) {
+                    reader.skipToBlock(0);
+                }
+
+                StandardProvenanceEventRecord recovered = reader.nextRecord();
+                assertNotNull(recovered);
+                assertEquals("nifi://unit-test", recovered.getTransitUri());
+            }
+
+            assertNull(reader.nextRecord());
         }
-        
+
         FileUtils.deleteFile(journalFile.getParentFile(), true);
-	}
-	
-	
-	@Test
-	public void testMultipleRecordsMultipleBlocksCompressed() throws IOException {
+    }
+
+
+    @Test
+    public void testMultipleRecordsMultipleBlocksCompressed() throws IOException {
         final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
         final File tocFile = TocUtil.getTocFile(journalFile);
         final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
         // new block each 10 bytes
         final StandardRecordWriter writer = new StandardRecordWriter(journalFile, tocWriter, true, 100);
-        
+
         writer.writeHeader();
         for (int i=0; i < 10; i++) {
-        	writer.writeRecord(createEvent(), i);
+            writer.writeRecord(createEvent(), i);
         }
         writer.close();
 
         final TocReader tocReader = new StandardTocReader(tocFile);
-        
+
         try (final FileInputStream fis = new FileInputStream(journalFile);
-        	final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
-        	for (int i=0; i < 10; i++) {
-	        	StandardProvenanceEventRecord recovered = reader.nextRecord();
-	        	System.out.println(recovered);
-	        	assertNotNull(recovered);
-	        	assertEquals((long) i, recovered.getEventId());
-	        	assertEquals("nifi://unit-test", recovered.getTransitUri());
-        	}
-        	
-        	assertNull(reader.nextRecord());
+                final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
+            for (int i=0; i < 10; i++) {
+                StandardProvenanceEventRecord recovered = reader.nextRecord();
+                System.out.println(recovered);
+                assertNotNull(recovered);
+                assertEquals((long) i, recovered.getEventId());
+                assertEquals("nifi://unit-test", recovered.getTransitUri());
+            }
+
+            assertNull(reader.nextRecord());
         }
-        
+
         FileUtils.deleteFile(journalFile.getParentFile(), true);
-	}
+    }
 }
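
Taken together, the tests above all follow the same write-then-read pattern against a journal file and its table-of-contents (TOC) file. Below is a condensed sketch of that pattern for readers skimming the diff; it reuses the same test-scoped classes and the tests' own createEvent() helper, so it is a companion to the tests rather than a standalone program.

    // Condensed round trip, mirroring the tests above.
    final File journalFile = new File("target/storage/" + UUID.randomUUID() + "/roundTrip.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);

    // third writer argument enables GZIP compression; the fourth appears to be the number of
    // uncompressed bytes written before a new block (and new TOC entry) is started
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    final StandardRecordWriter writer = new StandardRecordWriter(journalFile, tocWriter, true, 100);
    writer.writeHeader();
    writer.writeRecord(createEvent(), 1L);
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);
    try (final FileInputStream fis = new FileInputStream(journalFile);
            final StandardRecordReader reader = new StandardRecordReader(fis, journalFile.getName(), tocReader)) {
        reader.skipToBlock(0);   // the TOC makes each compressed block directly addressable
        final StandardProvenanceEventRecord recovered = reader.nextRecord();
        // per createEvent(), recovered.getEventId() == 1 and
        // recovered.getTransitUri() equals "nifi://unit-test"
    }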

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestUtil.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestUtil.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestUtil.java
index 7459fe8..eb0f736 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestUtil.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestUtil.java
@@ -24,7 +24,7 @@ import java.util.Set;
 import org.apache.nifi.flowfile.FlowFile;
 
 public class TestUtil {
-	public static FlowFile createFlowFile(final long id, final long fileSize, final Map<String, String> attributes) {
+    public static FlowFile createFlowFile(final long id, final long fileSize, final Map<String, String> attributes) {
         final Map<String, String> attrCopy = new HashMap<>(attributes);
 
         return new FlowFile() {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocReader.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocReader.java
index 30326e7..87400a0 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocReader.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocReader.java
@@ -38,7 +38,7 @@ public class TestStandardTocReader {
             out.write(0);
             out.write(0);
         }
-        
+
         try {
             try(final StandardTocReader reader = new StandardTocReader(file)) {
                 assertFalse(reader.isCompressed());
@@ -46,13 +46,13 @@ public class TestStandardTocReader {
         } finally {
             file.delete();
         }
-        
-        
+
+
         try (final OutputStream out = new FileOutputStream(file)) {
             out.write(0);
             out.write(1);
         }
-        
+
         try {
             try(final StandardTocReader reader = new StandardTocReader(file)) {
                 assertTrue(reader.isCompressed());
@@ -61,25 +61,25 @@ public class TestStandardTocReader {
             file.delete();
         }
     }
-    
-    
+
+
     @Test
     public void testGetBlockIndex() throws IOException {
         final File file = new File("target/" + UUID.randomUUID().toString());
         try (final OutputStream out = new FileOutputStream(file);
-             final DataOutputStream dos = new DataOutputStream(out)) {
+                final DataOutputStream dos = new DataOutputStream(out)) {
             out.write(0);
             out.write(0);
-            
+
             for (int i=0; i < 1024; i++) {
                 dos.writeLong(i * 1024L);
             }
         }
-        
+
         try {
             try(final StandardTocReader reader = new StandardTocReader(file)) {
                 assertFalse(reader.isCompressed());
-                
+
                 for (int i=0; i < 1024; i++) {
                     assertEquals(i * 1024, reader.getBlockOffset(i));
                 }
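
For reference, testGetBlockIndex above also pins down the on-disk TOC layout the reader expects: two single-byte header fields (the second being the compression flag checked by isCompressed()) followed by one 8-byte offset per block. The following is a hypothetical, self-contained way to produce such a file by hand; the meaning of the first header byte is an assumption here, not something this commit states.

    import java.io.DataOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class WriteTocByHand {
        public static void main(String[] args) throws IOException {
            final File tocFile = new File("target/example.toc");
            try (final DataOutputStream dos = new DataOutputStream(new FileOutputStream(tocFile))) {
                dos.write(0);   // first header byte (assumed to be a version marker)
                dos.write(0);   // compression flag: 0 = uncompressed journal, 1 = compressed
                for (int block = 0; block < 1024; block++) {
                    dos.writeLong(block * 1024L);   // byte offset at which each block begins
                }
            }
        }
    }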

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocWriter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocWriter.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocWriter.java
index 70f55a2..aebe0d5 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocWriter.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/toc/TestStandardTocWriter.java
@@ -31,12 +31,12 @@ public class TestStandardTocWriter {
         final File tocFile = new File("target/" + UUID.randomUUID().toString() + ".toc");
         try {
             assertTrue( tocFile.createNewFile() );
-            
+
             try (final StandardTocWriter writer = new StandardTocWriter(tocFile, false, false)) {
             }
         } finally {
             FileUtils.deleteFile(tocFile, false);
         }
     }
-    
+
 }


[50/50] [abbrv] incubator-nifi git commit: NIFI-292: - Checkstyle clean up.

Posted by mc...@apache.org.
NIFI-292:
- Checkstyle clean up.

Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/46e691dd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/46e691dd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/46e691dd

Branch: refs/heads/NIFI-292
Commit: 46e691dd5b1bcddabf19321f398e17651edc4ea6
Parents: 5f2bd4f
Author: Matt Gilman <ma...@gmail.com>
Authored: Tue Apr 28 10:04:16 2015 -0400
Committer: Matt Gilman <ma...@gmail.com>
Committed: Tue Apr 28 10:04:16 2015 -0400

----------------------------------------------------------------------
 .../nifi/web/api/BulletinBoardResource.java     |  1 -
 .../apache/nifi/web/api/ClusterResource.java    | 63 ++++----------
 .../apache/nifi/web/api/ConnectionResource.java | 60 ++++----------
 .../apache/nifi/web/api/ControllerResource.java | 86 ++++++--------------
 4 files changed, 56 insertions(+), 154 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/46e691dd/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
index d496ed7..6cc1c8a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
@@ -20,7 +20,6 @@ import com.wordnik.swagger.annotations.Api;
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
-import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/46e691dd/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
index 3572b7a..005a4a2 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
@@ -111,9 +111,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the status of this NiFi cluster.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A clusterStatusEntity
      */
     @GET
@@ -184,12 +182,9 @@ public class ClusterResource extends ApplicationResource {
     }
 
     /**
-     * Gets the contents of this NiFi cluster. This includes all nodes and their
-     * status.
+     * Gets the contents of this NiFi cluster. This includes all nodes and their status.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A clusterEntity
      */
     @GET
@@ -319,9 +314,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the processor.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor
      * @return A processorEntity
      */
@@ -385,11 +378,8 @@ public class ClusterResource extends ApplicationResource {
      * Updates the processors annotation data.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param processorId The id of the processor.
      * @param annotationData The annotation data to set.
      * @return A processorEntity.
@@ -539,9 +529,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the processor status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor
      * @return A clusterProcessorStatusEntity
      */
@@ -604,9 +592,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the processor status history for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor
      * @return A clusterProcessorStatusHistoryEntity
      */
@@ -668,9 +654,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the connection status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor
      * @return A clusterProcessorStatusEntity
      */
@@ -733,9 +717,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the connections status history for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor
      * @return A clusterProcessorStatusHistoryEntity
      */
@@ -797,9 +779,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the process group status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the process group
      * @return A clusterProcessGroupStatusEntity
      */
@@ -862,9 +842,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the process group status history for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the process group
      * @return A clusterProcessGroupStatusHistoryEntity
      */
@@ -926,9 +904,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the remote process group status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the remote process group
      * @return A clusterRemoteProcessGroupStatusEntity
      */
@@ -991,9 +967,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the input port status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the input port
      * @return A clusterPortStatusEntity
      */
@@ -1056,9 +1030,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the output port status for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the output port
      * @return A clusterPortStatusEntity
      */
@@ -1121,9 +1093,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Gets the remote process group status history for every node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor
      * @return A clusterRemoteProcessGroupStatusHistoryEntity
      */
@@ -1183,7 +1153,6 @@ public class ClusterResource extends ApplicationResource {
     }
 
     // setters
-    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/46e691dd/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
index 4daea39..cd8b922 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
@@ -114,9 +114,7 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Gets all the connections.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A connectionsEntity.
      */
     @GET
@@ -173,9 +171,7 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Retrieves the specified connection.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the connection.
      * @return A connectionEntity.
      */
@@ -239,9 +235,7 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Retrieves the specified connection status history.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the connection to retrieve.
      * @return A statusHistoryEntity.
      */
@@ -306,11 +300,8 @@ public class ConnectionResource extends ApplicationResource {
      * Creates a connection.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param name The name of the connection.
      * @param sourceId The id of the source connectable.
      * @param sourceGroupId The parent group id for the source.
@@ -318,15 +309,10 @@ public class ConnectionResource extends ApplicationResource {
      * @param bends Array of bend points in string form ["x,y", "x,y", "x,y"]
      * @param relationships Array of relationships.
      * @param flowFileExpiration The flow file expiration in minutes
-     * @param backPressureObjectThreshold The object count for when to apply
-     * back pressure.
-     * @param backPressureDataSizeThreshold The object size for when to apply
-     * back pressure.
-     * @param prioritizers Array of prioritizer types. These types should refer
-     * to one of the types in the GET /controller/prioritizers response. If this
-     * parameter is not specified no change will be made. If this parameter
-     * appears with no value (empty string), it will be treated as an empty
-     * array.
+     * @param backPressureObjectThreshold The object count for when to apply back pressure.
+     * @param backPressureDataSizeThreshold The object size for when to apply back pressure.
+     * @param prioritizers Array of prioritizer types. These types should refer to one of the types in the GET /controller/prioritizers response. If this parameter is not specified no change will be
+     * made. If this parameter appears with no value (empty string), it will be treated as an empty array.
      * @param destinationId The id of the destination connectable.
      * @param destinationGroupId The parent group id for the destination.
      * @param destinationType The type of the destination connectable.
@@ -573,11 +559,8 @@ public class ConnectionResource extends ApplicationResource {
      * Updates the specified relationship target.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param connectionId The id of the source processor.
      * @param name The name of the connection.
      * @param relationships Array of relationships.
@@ -585,15 +568,10 @@ public class ConnectionResource extends ApplicationResource {
      * @param labelIndex The control point index for the connection label
      * @param zIndex The zIndex for this connection
      * @param flowFileExpiration The flow file expiration in minutes
-     * @param backPressureObjectThreshold The object count for when to apply
-     * back pressure.
-     * @param backPressureDataSizeThreshold The object size for when to apply
-     * back pressure.
-     * @param prioritizers Array of prioritizer types. These types should refer
-     * to one of the types in the GET /controller/prioritizers response. If this
-     * parameter is not specified no change will be made. If this parameter
-     * appears with no value (empty string), it will be treated as an empty
-     * array.
+     * @param backPressureObjectThreshold The object count for when to apply back pressure.
+     * @param backPressureDataSizeThreshold The object size for when to apply back pressure.
+     * @param prioritizers Array of prioritizer types. These types should refer to one of the types in the GET /controller/prioritizers response. If this parameter is not specified no change will be
+     * made. If this parameter appears with no value (empty string), it will be treated as an empty array.
      * @param destinationId The id of the destination connectable.
      * @param destinationGroupId The group id of the destination.
      * @param destinationType The type of the destination type.
@@ -828,11 +806,8 @@ public class ConnectionResource extends ApplicationResource {
      * Removes the specified connection.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the connection.
      * @return An Entity containing the client id and an updated revision.
      */
@@ -911,7 +886,6 @@ public class ConnectionResource extends ApplicationResource {
     }
 
     // setters
-    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/46e691dd/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
index b2a5956..8ecae5d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
@@ -249,8 +249,7 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Returns a 200 OK response to indicate this is a valid controller
-     * endpoint.
+     * Returns a 200 OK response to indicate this is a valid controller endpoint.
      *
      * @return An OK response with an empty entity body.
      */
@@ -267,9 +266,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Returns the details of this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A controllerEntity.
      */
     @GET
@@ -364,22 +361,14 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Creates a new archive of this flow controller. Note, this is a POST
-     * operation that returns a URI that is not representative of the thing that
-     * was actually created. The archive that is created cannot be referenced at
-     * a later time, therefore there is no corresponding URI. Instead the
-     * request URI is returned.
+     * Creates a new archive of this flow controller. Note, this is a POST operation that returns a URI that is not representative of the thing that was actually created. The archive that is created
+     * cannot be referenced at a later time, therefore there is no corresponding URI. Instead the request URI is returned.
      *
-     * Alternatively, we could have performed a PUT request. However, PUT
-     * requests are supposed to be idempotent and this endpoint is certainly
-     * not.
+     * Alternatively, we could have performed a PUT request. However, PUT requests are supposed to be idempotent and this endpoint is certainly not.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A processGroupEntity.
      */
     @POST
@@ -498,9 +487,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the status for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A controllerStatusEntity.
      */
     @GET
@@ -551,9 +538,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the counters report for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A countersEntity.
      */
     @GET
@@ -605,9 +590,7 @@ public class ControllerResource extends ApplicationResource {
      * Update the specified counter. This will reset the counter value to 0.
      *
      * @param httpServletRequest request
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the counter.
      * @return A counterEntity.
      */
@@ -672,9 +655,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the configuration for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A controllerConfigurationEntity.
      */
     @GET
@@ -733,17 +714,12 @@ public class ControllerResource extends ApplicationResource {
      * Update the configuration for this NiFi.
      *
      * @param httpServletRequest request
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param name The name of this controller.
      * @param comments The comments of this controller.
-     * @param maxTimerDrivenThreadCount The maximum number of timer driven
-     * threads this controller has available.
-     * @param maxEventDrivenThreadCount The maximum number of timer driven
-     * threads this controller has available.
+     * @param maxTimerDrivenThreadCount The maximum number of timer driven threads this controller has available.
+     * @param maxEventDrivenThreadCount The maximum number of timer driven threads this controller has available.
      * @return A controllerConfigurationEntity.
      */
     @PUT
@@ -871,12 +847,9 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Retrieves the user details, including the authorities, about the user
-     * making the request.
+     * Retrieves the user details, including the authorities, about the user making the request.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A authoritiesEntity.
      */
     @GET
@@ -932,9 +905,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the banners for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A bannerEntity.
      */
     @GET
@@ -995,9 +966,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of processors that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A processorTypesEntity.
      */
     @GET
@@ -1051,9 +1020,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of controller services that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param serviceType Returns only services that implement this type
      * @return A controllerServicesTypesEntity.
      */
@@ -1113,9 +1080,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of reporting tasks that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A controllerServicesTypesEntity.
      */
     @GET
@@ -1169,9 +1134,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of prioritizers that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A prioritizerTypesEntity.
      */
     @GET
@@ -1225,9 +1188,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves details about this NiFi to put in the About dialog.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return An aboutEntity.
      */
     @GET
@@ -1286,7 +1247,6 @@ public class ControllerResource extends ApplicationResource {
     }
 
     // setters
-    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }


[44/50] [abbrv] incubator-nifi git commit: NIFI-292: - Annotating endpoints using swagger. - Started building the template for the REST documentation.

Posted by mc...@apache.org.
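
The commit message refers to annotating the REST endpoints with swagger so that documentation can be generated from them. As a rough illustration only, the hypothetical resource below (not part of this commit) shows the style of annotation involved, using the same com.wordnik.swagger.annotations package that the resources elsewhere in this series import, together with the optional clientId query parameter the endpoint javadoc describes.

    import javax.ws.rs.DefaultValue;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.QueryParam;
    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;

    import com.wordnik.swagger.annotations.Api;
    import com.wordnik.swagger.annotations.ApiOperation;
    import com.wordnik.swagger.annotations.ApiParam;

    // Hypothetical example; names and paths are illustrative, not taken from the commit.
    @Api(value = "/example", description = "Example of a swagger-annotated endpoint")
    @Path("/example")
    public class ExampleResource {

        @GET
        @Produces(MediaType.APPLICATION_JSON)
        @ApiOperation(value = "Gets the example entity",
                notes = "The optional clientId (generated if absent) is echoed back in the response.")
        public Response getExample(
                @ApiParam(value = "Optional client id. If not specified, a new one will be generated.",
                        required = false)
                @QueryParam("clientId") @DefaultValue("") final String clientId) {
            return Response.ok().build();
        }
    }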
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/index.html.hbs
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/index.html.hbs b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/index.html.hbs
new file mode 100644
index 0000000..3516247
--- /dev/null
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/index.html.hbs
@@ -0,0 +1,355 @@
+<!DOCTYPE html>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+        http://www.apache.org/licenses/LICENSE-2.0
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<html>
+    <head>
+        <title>{{info.title}}-{{info.version}}</title>
+        <script type="text/javascript" src="../nifi/js/jquery/jquery-2.1.1.min.js"></script>
+        <script type="text/javascript">
+            if (typeof window.jQuery === 'undefined') {
+                document.write(unescape('%3Cscript src="https://code.jquery.com/jquery-2.1.1.min.js" type="text/javascript" %3E%3C/script%3E'));
+            }
+        </script>
+        <style>
+            @import "https://fonts.googleapis.com/css?family=Open+Sans:300,300italic,400,400italic,600,600italic|Noto+Serif:400,400italic,700,700italic|Droid+Sans+Mono:400";
+
+            html, html a {
+                -webkit-font-smoothing: antialiased;
+                text-shadow: 1px 1px 1px rgba(0,0,0,0.004);
+            }
+
+            body {
+                width: 62.5em;
+                margin: 0 auto;
+                display: block;
+                font-family: "Open Sans", "DejaVu Sans", sans-serif;
+            }
+            
+            div.overview {
+                margin-bottom: 10px;
+            }
+            
+            div.endpoint {
+                margin-bottom: 10px;
+            }
+
+            /* get */
+            
+            div.endpoint.get {
+                border: 1px solid #174961;
+            }
+            
+            div.get div.operation-handle {
+                background-color: rgba(23, 73, 97, .15);
+            }
+            
+            div.get div.method {
+                background-color: #174961;
+            }
+            
+            div.get div.operation {
+                border-top: 1px solid #174961;
+            }
+            
+            /* post */
+            
+            div.endpoint.post {
+                border: 1px solid #7298AC;
+            }
+            
+            div.post div.operation-handle {
+                background-color: rgba(114, 152, 172, .15);
+            }
+            
+            div.post div.method {
+                background-color: #7298AC;
+            }
+            
+            div.post div.operation {
+                border-top: 1px solid #7298AC;
+            }
+            
+            /* put */
+            
+            div.endpoint.put {
+                border: 1px solid #063046;
+            }
+            
+            div.put div.operation-handle {
+                background-color: rgba(6, 48, 70, .15);
+            }
+            
+            div.put div.method {
+                background-color: #063046;
+            }
+            
+            div.put div.operation {
+                border-top: 1px solid #063046;
+            }
+            
+            /* delete */
+            
+            div.endpoint.delete {
+                border: 1px solid #47758E;
+            }
+            
+            div.delete div.operation-handle {
+                background-color: rgba(71, 117, 142, .15);
+            }
+            
+            div.delete div.method {
+                background-color: #47758E;
+            }
+            
+            div.delete div.operation {
+                border-top: 1px solid #47758E;
+            }
+            
+            /* operations */
+            
+            div.operation-handle {
+                cursor: pointer;
+                padding-right: 5px;
+                height: 22px;
+            }
+            
+            div.method {
+                float: left;
+                width: 75px;
+                color: #fff;
+                text-align: center;
+                background-color: #7098ad;
+                margin-right: 10px;
+                font-weight: bold;
+            }
+
+            div.endpoint div.path {
+                float: left;
+                line-height: 22px;
+            }
+
+            div.summary {
+                float: right;
+                font-size: 12px;
+                line-height: 22px;
+            }
+
+            div.operation {
+                padding: 5px;
+                font-size: 12px;
+            }
+
+            div.operation > div.title {
+                font-weight: bold;
+                color: #000;
+            }
+            
+            div.operation div.details {
+                margin-left: 5px;
+                margin-bottom: 5px;
+                color: #333;
+            }
+            
+            div.operation div.description {
+                margin-bottom: 10px;
+            }
+
+            div.mediatype {
+                line-height: 16px;
+            }
+
+            div.mediatype > div.title {
+                float: left;
+                width: 70px;
+            }
+            
+            div.mediatype div.title {
+                float: left;
+            }
+            
+            div.type {
+                position: fixed;
+                width: 800px;
+                height: 500px;
+                left: 50%;
+                top: 50%;
+                margin-left: -400px;
+                margin-top: -250px;
+                border: 3px solid #365C6A;
+                box-shadow: 4px 4px 6px rgba(0, 0, 0, 0.9);
+                padding: 10px;
+                background-color: #eee;
+                font-size: 12px;
+                overflow-y: scroll;
+            }
+            
+            /* tables */
+
+            table {
+                background-color: #fefefe;
+                border: 1px solid #ccc;
+                border-left: 6px solid #ccc;
+                color: #555;
+                display: block;
+                margin-bottom: 12px;
+                padding: 5px 8px;
+            }
+            
+            table th {
+                font-weight: bold;
+                vertical-align:top;
+                text-align:left;
+                padding: 4px 15px;
+                border-width: 0;
+                white-space: nowrap;
+            }
+            
+            table td {
+                vertical-align:top;
+                text-align:left;
+                padding: 2px 15px;
+                border-width: 0;
+                white-space: nowrap;
+            }
+            
+            table td:last-child {
+                width: 99%;
+                white-space: normal;
+            }
+            
+            code.example {
+                background-color: #fefefe;
+                border: 1px solid #ccc;
+                border-left: 6px solid #ccc;
+                color: #555;
+                margin-bottom: 10px;
+                padding: 5px 8px;
+                white-space: pre;
+                display: block;
+                tab-size: 4;
+                -moz-tab-size: 4;
+                -o-tab-size: 4;
+                line-height: 20px
+            }
+            
+            span.nested.collapsed {
+                cursor: pointer;
+                border: 1px solid #7298AC;
+                background-color: rgba(114, 152, 172, .15);
+                padding: 1px;
+            }
+            
+            /* general */
+            
+            .mono {
+                font-family: monospace;
+            }
+            
+            div.clear {
+                clear: both;
+            }
+
+            .hidden {
+                display: none;
+            }
+            
+            a {
+                cursor: pointer;
+                color: #1e373f;
+                font-weight: normal;
+            }
+            
+            a:hover {
+                color: #264c58;
+                text-decoration: underline;
+            }
+        </style>
+        <script type="text/javascript">
+            $(document).ready(function () {
+                // hide any open type dialogs
+                $('html').on('click', function() {
+                    $('div.type').hide();
+                });
+                
+                // populate all paths - this is necessary because the @key
+                // doesn't seem to reset after iterating through a nested 
+                // array or object
+                $('span.path').each(function() {
+                    var path = $(this);
+                    var endpoint = path.parent();
+                    endpoint.find('div.path').text(path.text());
+                });
+                
+                // toggles the visibility of a given operation
+                $('div.operation-handle').on('click', function () {
+                    $(this).next('div.operation').toggle();
+                });
+                
+                // add support for clicking to view the definition of a type
+                $('a.type-link').on('click', function(e) {
+                    // hide any previously shown dialogs
+                    $('div.type').hide();
+
+                    // show the type selected
+                    var link = $(this);
+                    var typeId = link.text();
+                    $('#' + typeId).show();
+                    e.stopPropagation();
+                });
+                
+                // prevent hiding when clicking on the type dialog
+                $('div.type').on('click', function(e) {
+                    e.stopPropagation();
+                });
+                
+                // populate nested examples
+                $('code.example').on('click', 'span.nested', function(e) {
+                    var nested = $(this).removeClass('collapsed');
+                    var nestedId = nested.find('span.nested-id');
+                    var nestedExample = nested.find('span.nested-example');
+                    
+                    // get the id of the nested example
+                    var typeId = nestedId.text();
+                    var example = $('#' + typeId + ' code.example').html();
+                    var depth = nestedId.parents('span.nested').length;
+                    
+                    // tab over as appropriate
+                    example = example.replace(/(\r\n|\r|\n)/g, function(match) {
+                        var tab = '\t';
+                        for (var i = 0; i < depth - 1; i++) {
+                            tab += '\t';
+                        }
+                        return match + tab;
+                    });
+                    
+                    // copy over the example
+                    nestedExample.html(example);
+                    e.stopPropagation();
+                });
+            });
+        </script>
+    </head>
+    <body>
+        <div class="overview">
+            <div class="title">{{info.title}}-{{info.version}}</div>
+        </div>
+        {{#each paths}}
+            {{> endpoint}}
+        {{/each}}
+        {{#each definitions}}
+            {{> type}}
+        {{/each}}
+    </body>
+</html>
\ No newline at end of file
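
The template above ({{info.title}}, the {{> endpoint}} and {{> type}} partials, and helpers such as {{basename}} used in operation.hbs below) is presumably rendered by a Java Handlebars implementation at build time; that wiring is not part of this commit, so the snippet below is only a hypothetical illustration of the templating mechanics, assuming the jknack handlebars-java library.

    import java.io.IOException;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import com.github.jknack.handlebars.Handlebars;
    import com.github.jknack.handlebars.Helper;
    import com.github.jknack.handlebars.Template;

    public class TemplateSketch {
        public static void main(String[] args) throws IOException {
            final Handlebars handlebars = new Handlebars();

            // a simple stand-in for the "basename" helper used by operation.hbs,
            // which reduces a swagger $ref such as "#/definitions/ClusterEntity" to "ClusterEntity"
            handlebars.registerHelper("basename", (Helper<String>) (ref, options) ->
                    ref == null ? "" : ref.substring(ref.lastIndexOf('/') + 1));

            // the title expression from index.html.hbs, applied to a tiny stand-in model
            final Template title = handlebars.compileInline("{{info.title}}-{{info.version}}");
            final Map<String, Object> info = new HashMap<>();
            info.put("title", "nifi-web-api");
            info.put("version", "0.1.0");
            System.out.println(title.apply(Collections.singletonMap("info", info)));
            // prints: nifi-web-api-0.1.0

            System.out.println(handlebars.compileInline("{{basename ref}}")
                    .apply(Collections.singletonMap("ref", "#/definitions/ClusterEntity")));
            // prints: ClusterEntity
        }
    }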

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/operation.hbs
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/operation.hbs b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/operation.hbs
new file mode 100644
index 0000000..8c626bb
--- /dev/null
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/operation.hbs
@@ -0,0 +1,112 @@
+{{!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+        http://www.apache.org/licenses/LICENSE-2.0
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+--}}
+<div class="operation hidden">
+    {{#if description}}
+    <div class="description">
+        {{description}}
+    </div>
+    {{/if}}
+    <div class="title">Request</div>
+    <div class="mediatypes details">
+        {{#if consumes}}
+        <div class="mediatype"><div class="title">consumes:</div><div class="mono">{{join consumes ", "}}</div><div class="clear"></div></div>
+        {{/if}}
+    </div>
+    {{#if parameters}}
+    <table>
+        <thead>
+            <tr>
+                <th>Name</th>
+                <th>Location</th>
+                <th>Required</th>
+                <th>Type</th>
+                <th>Description</th>
+            </tr>
+        </thead>
+        <tbody>
+    {{/if}}
+    {{#each parameters}}
+        <tr>
+            <td>{{#ifeq in "body"}}{{else}}{{name}}{{/ifeq}}</td>
+            <td>{{in}}</td>
+            <td>
+                {{#ifeq in "body"}}
+                    yes
+                {{else}}
+                    {{#if required}}yes{{else}}no{{/if}}
+                {{/ifeq}}
+            </td>
+            {{#ifeq in "body"}}
+                <td>
+                {{#ifeq schema.type "array"}}Array[<a class="type-link" href="javascript:void(0);">{{basename schema.items.$ref}}</a>]{{/ifeq}}
+                {{#schema.$ref}}<a class="type-link" href="javascript:void(0);">{{basename schema.$ref}}</a> {{/schema.$ref}}
+                </td>
+            {{else}}
+                {{#ifeq type "array"}}
+                        <td>Array[{{items.type}}] ({{collectionFormat}})</td>
+                {{else}}
+                    {{#ifeq type "ref"}}
+                        <td>string</td>
+                    {{else}}
+                        <td>{{type}} {{#format}}({{format}}){{/format}}</td>
+                    {{/ifeq}}
+                {{/ifeq}}
+            {{/ifeq}}
+            <td>{{description}}</td>
+        </tr>
+    {{/each}}
+    {{#if parameters}}
+        </tbody>
+    </table>
+    {{/if}}
+    <div class="title">Response</div>
+    <div class="mediatypes details">
+        {{#if produces}}
+        <div class="mediatype"><div class="title">produces:</div><div class="mono">{{join produces ", "}}</div><div class="clear"></div></div>
+        {{/if}}
+    </div>
+    <table>
+        <thead>
+            <tr>
+                <th>Status Code</th>
+                <th>Type</th>
+                <th>Description</th>
+            </tr>
+        </thead>
+        <tbody>
+            {{#each responses}}
+            <tr>
+                <td>{{@key}}</td>
+                <td>
+                    {{#if schema}}
+                        {{#schema.$ref}}<a class="type-link" href="javascript:void(0);">{{basename schema.$ref}}</a>{{/schema.$ref}}
+                    {{else}}
+                        string
+                    {{/if}}
+                </td>
+                <td>{{description}}</td>
+            </tr>
+            {{/each}}
+        </tbody>
+    </table>
+    <div class="title">Authorization</div>
+    <div class="authorization details">
+        {{#security}}
+            {{#each this}}
+            <div>{{@key}}</div>
+            {{/each}}
+        {{/security}}
+    </div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/type.hbs
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/type.hbs b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/type.hbs
new file mode 100644
index 0000000..08f7a02
--- /dev/null
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/type.hbs
@@ -0,0 +1,51 @@
+{{!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+        http://www.apache.org/licenses/LICENSE-2.0
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+--}}
+<div id="{{@key}}" class="type hidden">
+    <h3>{{@key}}</h3>
+    <table>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Required</th>
+            <th>Description</th>
+        </tr>
+        {{#each properties}}
+            <tr>
+                <td>{{@key}}</td>
+                <td>
+                    {{#ifeq type "array"}}
+                        {{#items.$ref}}
+                            {{type}}[<a class="type-link" href="javascript:void(0);">{{basename items.$ref}}</a>]
+                        {{/items.$ref}}
+                        {{^items.$ref}}
+                            {{type}}[{{items.type}}]
+                        {{/items.$ref}}
+                    {{else}}
+                        {{#$ref}}
+                            <a class="type-link" href="javascript:void(0);">{{basename $ref}}</a>
+                        {{/$ref}}
+                        {{^$ref}}
+                            {{type}}{{#format}} ({{format}}){{/format}}
+                        {{/$ref}}
+                    {{/ifeq}}
+                </td>
+                <td>{{#required}}required{{/required}}{{^required}}optional{{/required}}</td>
+                <td>{{#description}}{{{description}}}{{/description}}</td>
+            </tr>
+        {{/each}}
+    </table>
+    <h4>Example</h4>
+    <code class="example">&#123;{{> example}}&#125;</code>
+</div>
\ No newline at end of file
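
The operation.hbs and type.hbs templates above rely on non-built-in helpers: ifeq (a block equality test), basename (reduces a JSON $ref path to the bare type name), and join. The sketch below shows how such helpers could be registered; it assumes the templates are rendered with the com.github.jknack handlebars-java library, which this commit does not show, and the helper bodies are illustrative rather than the project's actual implementations.

    import com.github.jknack.handlebars.Handlebars;
    import com.github.jknack.handlebars.Options;

    public class SwaggerDocHelpers {
        public static Handlebars registerHelpers(final Handlebars handlebars) {
            // {{basename schema.$ref}} -> keep only the final path segment of the reference
            handlebars.registerHelper("basename", (Object ref, Options options) -> {
                final String value = String.valueOf(ref);
                return value.substring(value.lastIndexOf('/') + 1);
            });

            // {{#ifeq in "body"}}...{{else}}...{{/ifeq}} -> render the block when both values match
            handlebars.registerHelper("ifeq", (Object left, Options options) ->
                    String.valueOf(left).equals(String.valueOf(options.param(0)))
                            ? options.fn() : options.inverse());

            // a "join" helper would be registered the same way (or taken from the library's string helpers)
            return handlebars;
        }
    }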


[40/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/FileUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/FileUtils.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/FileUtils.java
index 73c8aa0..daefd04 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/FileUtils.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/FileUtils.java
@@ -39,8 +39,7 @@ import java.util.Random;
 import org.slf4j.Logger;
 
 /**
- * A utility class containing a few useful static methods to do typical IO
- * operations.
+ * A utility class containing a few useful static methods to do typical IO operations.
  *
  * @author unattributed
  */
@@ -94,12 +93,10 @@ public class FileUtils {
     }
 
     /**
-     * Deletes the given file. If the given file exists but could not be deleted
-     * this will be printed as a warning to the given logger
+     * Deletes the given file. If the given file exists but could not be deleted this will be printed as a warning to the given logger
      *
      * @param file the file to delete
-     * @param logger the logger to provide logging information to about the
-     * operation
+     * @param logger the logger to provide logging information to about the operation
      * @return true if given file no longer exists
      */
     public static boolean deleteFile(final File file, final Logger logger) {
@@ -107,13 +104,11 @@ public class FileUtils {
     }
 
     /**
-     * Deletes the given file. If the given file exists but could not be deleted
-     * this will be printed as a warning to the given logger
+     * Deletes the given file. If the given file exists but could not be deleted this will be printed as a warning to the given logger
      *
      * @param file the file to delete
      * @param logger the logger to write to
-     * @param attempts indicates how many times an attempt to delete should be
-     * made
+     * @param attempts indicates how many times an attempt to delete should be made
      * @return true if given file no longer exists
      */
     public static boolean deleteFile(final File file, final Logger logger, final int attempts) {
@@ -143,8 +138,7 @@ public class FileUtils {
     }
 
     /**
-     * Deletes all of the given files. If any exist and cannot be deleted that
-     * will be printed at warn to the given logger.
+     * Deletes all of the given files. If any exist and cannot be deleted that will be printed at warn to the given logger.
      *
      * @param files can be null
      * @param logger can be null
@@ -154,13 +148,11 @@ public class FileUtils {
     }
 
     /**
-     * Deletes all of the given files. If any exist and cannot be deleted that
-     * will be printed at warn to the given logger.
+     * Deletes all of the given files. If any exist and cannot be deleted that will be printed at warn to the given logger.
      *
      * @param files can be null
      * @param logger can be null
-     * @param attempts indicates how many times an attempt should be made to
-     * delete each file
+     * @param attempts indicates how many times an attempt should be made to delete each file
      */
     public static void deleteFile(final List<File> files, final Logger logger, final int attempts) {
         if (null == files || files.isEmpty()) {
@@ -188,9 +180,8 @@ public class FileUtils {
     }
 
     /**
-     * Deletes all files (not directories..) in the given directory (non
-     * recursive) that match the given filename filter. If any file cannot be
-     * deleted then this is printed at warn to the given logger.
+     * Deletes all files (not directories) in the given directory (non-recursive) that match the given filename filter. If any file cannot be deleted then this is printed at warn to the given
+     * logger.
      *
      * @param directory the directory to scan for files to delete
      * @param filter if null then no filter is used
@@ -201,9 +192,7 @@ public class FileUtils {
     }
 
     /**
-     * Deletes all files (not directories) in the given directory (recursive)
-     * that match the given filename filter. If any file cannot be deleted then
-     * this is printed at warn to the given logger.
+     * Deletes all files (not directories) in the given directory (recursive) that match the given filename filter. If any file cannot be deleted then this is printed at warn to the given logger.
      *
      * @param directory the directory to scan
      * @param filter if null then no filter is used
@@ -215,16 +204,13 @@ public class FileUtils {
     }
 
     /**
-     * Deletes all files (not directories) in the given directory (recursive)
-     * that match the given filename filter. If any file cannot be deleted then
-     * this is printed at warn to the given logger.
+     * Deletes all files (not directories) in the given directory (recursive) that match the given filename filter. If any file cannot be deleted then this is printed at warn to the given logger.
      *
      * @param directory the directory to scan
      * @param filter if null then no filter is used
      * @param logger the logger
      * @param recurse whether to recurse subdirectories or not
-     * @param deleteEmptyDirectories default is false; if true will delete
-     * directories found that are empty
+     * @param deleteEmptyDirectories default is false; if true will delete directories found that are empty
      */
     public static void deleteFilesInDir(final File directory, final FilenameFilter filter, final Logger logger, final boolean recurse, final boolean deleteEmptyDirectories) {
         // ensure the specified directory is actually a directory and that it exists
@@ -269,11 +255,9 @@ public class FileUtils {
     }
 
     /**
-     * Randomly generates a sequence of bytes and overwrites the contents of the
-     * file a number of times. The file is then deleted.
+     * Randomly generates a sequence of bytes and overwrites the contents of the file a number of times. The file is then deleted.
      *
-     * @param file File to be overwritten a number of times and, ultimately,
-     * deleted
+     * @param file File to be overwritten a number of times and, ultimately, deleted
      * @param passes Number of times file should be overwritten
      * @throws IOException if something makes shredding or deleting a problem
      */
@@ -349,29 +333,19 @@ public class FileUtils {
     }
 
     /**
-     * Copies the given source file to the given destination file. The given
-     * destination will be overwritten if it already exists.
+     * Copies the given source file to the given destination file. The given destination will be overwritten if it already exists.
      *
      * @param source the file to copy
      * @param destination the file to copy to
-     * @param lockInputFile if true will lock input file during copy; if false
-     * will not
-     * @param lockOutputFile if true will lock output file during copy; if false
-     * will not
-     * @param move if true will perform what is effectively a move operation
-     * rather than a pure copy. This allows for potentially highly efficient
-     * movement of the file but if not possible this will revert to a copy then
-     * delete behavior. If false, then the file is copied and the source file is
-     * retained. If a true rename/move occurs then no lock is held during that
-     * time.
-     * @param logger if failures occur, they will be logged to this logger if
-     * possible. If this logger is null, an IOException will instead be thrown,
-     * indicating the problem.
+     * @param lockInputFile if true will lock input file during copy; if false will not
+     * @param lockOutputFile if true will lock output file during copy; if false will not
+     * @param move if true will perform what is effectively a move operation rather than a pure copy. This allows for potentially highly efficient movement of the file but if not possible this will
+     * revert to a copy then delete behavior. If false, then the file is copied and the source file is retained. If a true rename/move occurs then no lock is held during that time.
+     * @param logger if failures occur, they will be logged to this logger if possible. If this logger is null, an IOException will instead be thrown, indicating the problem.
      * @return long number of bytes copied
      * @throws FileNotFoundException if the source file could not be found
      * @throws IOException if unable to read or write the underlying streams
-     * @throws SecurityException if a security manager denies the needed file
-     * operations
+     * @throws SecurityException if a security manager denies the needed file operations
      */
     public static long copyFile(final File source, final File destination, final boolean lockInputFile, final boolean lockOutputFile, final boolean move, final Logger logger)
             throws FileNotFoundException, IOException {
@@ -434,21 +408,17 @@ public class FileUtils {
     }
 
     /**
-     * Copies the given source file to the given destination file. The given
-     * destination will be overwritten if it already exists.
+     * Copies the given source file to the given destination file. The given destination will be overwritten if it already exists.
      *
      * @param source the file to copy from
      * @param destination the file to copy to
-     * @param lockInputFile if true will lock input file during copy; if false
-     * will not
-     * @param lockOutputFile if true will lock output file during copy; if false
-     * will not
+     * @param lockInputFile if true will lock input file during copy; if false will not
+     * @param lockOutputFile if true will lock output file during copy; if false will not
      * @param logger the logger to use
      * @return long number of bytes copied
      * @throws FileNotFoundException if the source file could not be found
      * @throws IOException if unable to read or write to file
-     * @throws SecurityException if a security manager denies the needed file
-     * operations
+     * @throws SecurityException if a security manager denies the needed file operations
      */
     public static long copyFile(final File source, final File destination, final boolean lockInputFile, final boolean lockOutputFile, final Logger logger) throws FileNotFoundException, IOException {
         return FileUtils.copyFile(source, destination, lockInputFile, lockOutputFile, false, logger);
@@ -497,10 +467,8 @@ public class FileUtils {
     }
 
     /**
-     * Renames the given file from the source path to the destination path. This
-     * handles multiple attempts. This should only be used to rename within a
-     * given directory. Renaming across directories might not work well. See the
-     * <code>File.renameTo</code> for more information.
+     * Renames the given file from the source path to the destination path. This handles multiple attempts. This should only be used to rename within a given directory. Renaming across directories
+     * might not work well. See the <code>File.renameTo</code> for more information.
      *
      * @param source the file to rename
      * @param destination the file path to rename to
@@ -512,19 +480,14 @@ public class FileUtils {
     }
 
     /**
-     * Renames the given file from the source path to the destination path. This
-     * handles multiple attempts. This should only be used to rename within a
-     * given directory. Renaming across directories might not work well. See the
-     * <code>File.renameTo</code> for more information.
+     * Renames the given file from the source path to the destination path. This handles multiple attempts. This should only be used to rename within a given directory. Renaming across directories
+     * might not work well. See the <code>File.renameTo</code> for more information.
      *
      * @param source the file to rename
      * @param destination the file path to rename to
      * @param maxAttempts the max number of attempts to attempt the rename
-     * @param replace if true and a rename attempt fails will check if a file is
-     * already at the destination path. If so it will delete that file and
-     * attempt the rename according the remaining maxAttempts. If false, any
-     * conflicting files will be left as they were and the rename attempts will
-     * fail if conflicting.
+     * @param replace if true and a rename attempt fails will check if a file is already at the destination path. If so it will delete that file and attempt the rename according to the remaining
+     * maxAttempts. If false, any conflicting files will be left as they were and the rename attempts will fail if conflicting.
      * @throws IOException if rename isn't successful
      */
     public static void renameFile(final File source, final File destination, final int maxAttempts, final boolean replace) throws IOException {
@@ -553,20 +516,15 @@ public class FileUtils {
     }
 
     /**
-     * Syncs a primary copy of a file with the copy in the restore directory. If
-     * the restore directory does not have a file and the primary has a file,
-     * the the primary's file is copied to the restore directory. Else if the
-     * restore directory has a file, but the primary does not, then the
-     * restore's file is copied to the primary directory. Else if the primary
-     * file is different than the restore file, then an IllegalStateException is
-     * thrown. Otherwise, if neither file exists, then no syncing is performed.
+     * Syncs a primary copy of a file with the copy in the restore directory. If the restore directory does not have a file and the primary has a file, then the primary's file is copied to the restore
+     * directory. Else if the restore directory has a file, but the primary does not, then the restore's file is copied to the primary directory. Else if the primary file is different from the restore
+     * file, then an IllegalStateException is thrown. Otherwise, if neither file exists, then no syncing is performed.
      *
      * @param primaryFile the primary file
      * @param restoreFile the restore file
      * @param logger a logger
      * @throws IOException if an I/O problem was encountered during syncing
-     * @throws IllegalStateException if the primary and restore copies exist but
-     * are different
+     * @throws IllegalStateException if the primary and restore copies exist but are different
      */
     public static void syncWithRestore(final File primaryFile, final File restoreFile, final Logger logger)
             throws IOException {
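
For reference, a minimal usage sketch of the FileUtils methods whose Javadoc is rewrapped above. Only the signatures shown in this diff are used (copyFile, renameFile, deleteFile); the file paths, retry counts, and class name are illustrative.

    import java.io.File;
    import java.io.IOException;
    import org.apache.nifi.util.file.FileUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class FileUtilsSketch {
        private static final Logger logger = LoggerFactory.getLogger(FileUtilsSketch.class);

        public static void main(final String[] args) throws IOException {
            final File source = new File("data/input.txt");       // illustrative paths
            final File working = new File("data/input.txt.tmp");
            final File destination = new File("data/output.txt");

            // copy without locking either file and without a move; returns the number of bytes copied
            final long copied = FileUtils.copyFile(source, working, false, false, false, logger);
            logger.info("copied {} bytes", copied);

            // rename within the same directory, retrying up to 3 times and replacing any
            // conflicting file already sitting at the destination path
            FileUtils.renameFile(working, destination, 3, true);

            // delete the original with up to 3 attempts; failures are logged as warnings
            if (!FileUtils.deleteFile(source, logger, 3)) {
                logger.warn("{} could not be deleted", source);
            }
        }
    }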

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/CompoundUpdateMonitor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/CompoundUpdateMonitor.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/CompoundUpdateMonitor.java
index 6f9c616..dc60318 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/CompoundUpdateMonitor.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/CompoundUpdateMonitor.java
@@ -22,11 +22,8 @@ import java.util.ArrayList;
 import java.util.List;
 
 /**
- * An {@link UpdateMonitor} that combines multiple <code>UpdateMonitor</code>s
- * such that it will indicate a change in a file only if ALL sub-monitors
- * indicate a change. The sub-monitors will be applied in the order given and if
- * any indicates that the state has not changed, the subsequent sub-monitors may
- * not be given a chance to run
+ * An {@link UpdateMonitor} that combines multiple <code>UpdateMonitor</code>s such that it will indicate a change in a file only if ALL sub-monitors indicate a change. The sub-monitors will be
+ * applied in the order given and if any indicates that the state has not changed, the subsequent sub-monitors may not be given a chance to run
  */
 public class CompoundUpdateMonitor implements UpdateMonitor {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/SynchronousFileWatcher.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/SynchronousFileWatcher.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/SynchronousFileWatcher.java
index 270d4d7..0040037 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/SynchronousFileWatcher.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/file/monitor/SynchronousFileWatcher.java
@@ -23,8 +23,7 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
 /**
- * Allows the user to configure a {@link java.nio.file.Path Path} to watch for
- * modifications and periodically poll to check if the file has been modified
+ * Allows the user to configure a {@link java.nio.file.Path Path} to watch for modifications and periodically poll to check if the file has been modified
  */
 public class SynchronousFileWatcher {
 
@@ -58,8 +57,7 @@ public class SynchronousFileWatcher {
     }
 
     /**
-     * Checks if the file has been updated according to the configured
-     * {@link UpdateMonitor} and resets the state
+     * Checks if the file has been updated according to the configured {@link UpdateMonitor} and resets the state
      *
      * @return true if updated; false otherwise
      * @throws IOException if failure occurs checking for changes
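
A minimal polling sketch for the watcher described above. The checkAndReset() call is an assumption based on the Javadoc in this hunk ("checks ... and resets the state"); construction of the watcher is left to the caller since no constructor appears in this diff.

    import java.io.IOException;
    import org.apache.nifi.util.file.monitor.SynchronousFileWatcher;

    public class WatcherSketch {
        public static void pollForever(final SynchronousFileWatcher watcher) throws IOException, InterruptedException {
            while (true) {
                // assumed method name; returns true if the file changed since the last check
                if (watcher.checkAndReset()) {
                    // react to the modification, e.g. reload configuration
                }
                Thread.sleep(1000L); // poll period chosen for illustration
            }
        }
    }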

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/Search.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/Search.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/Search.java
index f93902f..b407c4d 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/Search.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/Search.java
@@ -23,32 +23,26 @@ import java.util.Set;
 import org.apache.nifi.util.search.ahocorasick.SearchState;
 
 /**
- * Defines an interface to search for content given a set of search terms. Any
- * implementation of search must be thread safe.
+ * Defines an interface to search for content given a set of search terms. Any implementation of search must be thread safe.
  *
  */
 public interface Search<T> {
 
     /**
-     * Establishes the dictionary of terms which will be searched in subsequent
-     * search calls. This can be called only once
+     * Establishes the dictionary of terms which will be searched in subsequent search calls. This can be called only once
      *
      * @param terms the terms to create a dictionary of
      */
     void initializeDictionary(Set<SearchTerm<T>> terms);
 
     /**
-     * Searches the given input stream for matches between the already specified
-     * dictionary and the contents scanned.
+     * Searches the given input stream for matches between the already specified dictionary and the contents scanned.
      *
      * @param haystack the source data to scan for hits
-     * @param findAll if true will find all matches if false will find only the
-     * first match
-     * @return SearchState containing results Map might be empty which indicates
-     * no matches found but will not be null
+     * @param findAll if true will find all matches; if false will find only the first match
+     * @return SearchState containing results; the result Map might be empty, which indicates no matches were found, but it will not be null
      * @throws IOException Thrown for any exceptions occurring while searching.
-     * @throws IllegalStateException if the dictionary has not yet been
-     * initialized
+     * @throws IllegalStateException if the dictionary has not yet been initialized
      */
     SearchState<T> search(InputStream haystack, boolean findAll) throws IOException;
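
A usage sketch of the Search contract documented above: establish the dictionary once, then scan a stream. Only initializeDictionary(Set<SearchTerm<T>>) and search(InputStream, boolean) come from this diff; the three-argument SearchTerm constructor and the file-reading plumbing are assumptions for illustration.

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.HashSet;
    import java.util.Set;
    import org.apache.nifi.util.search.Search;
    import org.apache.nifi.util.search.SearchTerm;
    import org.apache.nifi.util.search.ahocorasick.SearchState;

    public class SearchSketch {
        public static void scan(final Search<String> search) throws IOException {
            // the dictionary may be established only once per Search instance
            final Set<SearchTerm<String>> terms = new HashSet<>();
            // constructor arguments beyond (byte[], defensiveCopy) are assumed for this sketch
            terms.add(new SearchTerm<>("needle".getBytes(StandardCharsets.UTF_8), true, "needle"));
            search.initializeDictionary(terms);

            try (InputStream haystack = Files.newInputStream(Paths.get("data.bin"))) {
                // findAll = true requests every match rather than only the first
                final SearchState<String> state = search.search(haystack, true);
                // the result map may be empty (no matches) but is documented never to be null
            }
        }
    }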
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/SearchTerm.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/SearchTerm.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/SearchTerm.java
index a1d361e..48f8678 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/SearchTerm.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/SearchTerm.java
@@ -40,9 +40,8 @@ public class SearchTerm<T> {
     }
 
     /**
-     * Constructs a search term. Optionally performs a defensive copy of the
-     * given byte array. If the caller indicates a defensive copy is not
-     * necessary then they must not change the given arrays state any longer
+     * Constructs a search term. Optionally performs a defensive copy of the given byte array. If the caller indicates a defensive copy is not necessary then they must not change the given array's
+     * state any longer
      *
      * @param bytes the bytes of the new search term
      * @param defensiveCopy if true will make a defensive copy; false otherwise


[06/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestCompressContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestCompressContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestCompressContent.java
index f5fea2c..699db39 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestCompressContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestCompressContent.java
@@ -30,133 +30,100 @@ public class TestCompressContent {
 
     @Test
     public void testBzip2DecompressConcatenated() throws Exception {
-        final TestRunner runner = TestRunners.
-                newTestRunner(CompressContent.class);
+        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
         runner.setProperty(CompressContent.MODE, "decompress");
         runner.setProperty(CompressContent.COMPRESSION_FORMAT, "bzip2");
         runner.setProperty(CompressContent.UPDATE_FILENAME, "false");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/CompressedData/SampleFileConcat.txt.bz2"));
+        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFileConcat.txt.bz2"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
-        MockFlowFile flowFile = runner.
-                getFlowFilesForRelationship(CompressContent.REL_SUCCESS).
-                get(0);
-        flowFile.assertContentEquals(Paths.
-                get("src/test/resources/CompressedData/SampleFileConcat.txt"));
+        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
+        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFileConcat.txt"));
         flowFile.assertAttributeEquals("filename", "SampleFileConcat.txt.bz2"); // not updating filename
     }
 
     @Test
     public void testBzip2Decompress() throws Exception {
-        final TestRunner runner = TestRunners.
-                newTestRunner(CompressContent.class);
+        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
         runner.setProperty(CompressContent.MODE, "decompress");
         runner.setProperty(CompressContent.COMPRESSION_FORMAT, "bzip2");
         runner.setProperty(CompressContent.UPDATE_FILENAME, "true");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/CompressedData/SampleFile.txt.bz2"));
+        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.bz2"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
-        MockFlowFile flowFile = runner.
-                getFlowFilesForRelationship(CompressContent.REL_SUCCESS).
-                get(0);
-        flowFile.assertContentEquals(Paths.
-                get("src/test/resources/CompressedData/SampleFile.txt"));
+        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
+        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
         flowFile.assertAttributeEquals("filename", "SampleFile.txt");
 
         runner.clearTransferState();
-        runner.enqueue(Paths.
-                get("src/test/resources/CompressedData/SampleFile1.txt.bz2"));
+        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile1.txt.bz2"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
-        flowFile = runner.
-                getFlowFilesForRelationship(CompressContent.REL_SUCCESS).
-                get(0);
-        flowFile.assertContentEquals(Paths.
-                get("src/test/resources/CompressedData/SampleFile.txt"));
+        flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
+        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
         flowFile.assertAttributeEquals("filename", "SampleFile1.txt");
     }
 
     @Test
     public void testGzipDecompress() throws Exception {
-        final TestRunner runner = TestRunners.
-                newTestRunner(CompressContent.class);
+        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
         runner.setProperty(CompressContent.MODE, "decompress");
         runner.setProperty(CompressContent.COMPRESSION_FORMAT, "gzip");
-        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").
-                isValid());
+        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").isValid());
 
-        runner.enqueue(Paths.
-                get("src/test/resources/CompressedData/SampleFile.txt.gz"));
+        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.gz"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
-        MockFlowFile flowFile = runner.
-                getFlowFilesForRelationship(CompressContent.REL_SUCCESS).
-                get(0);
-        flowFile.assertContentEquals(Paths.
-                get("src/test/resources/CompressedData/SampleFile.txt"));
+        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
+        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
         flowFile.assertAttributeEquals("filename", "SampleFile.txt");
 
         runner.clearTransferState();
-        runner.enqueue(Paths.
-                get("src/test/resources/CompressedData/SampleFile1.txt.gz"));
+        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile1.txt.gz"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
-        flowFile = runner.
-                getFlowFilesForRelationship(CompressContent.REL_SUCCESS).
-                get(0);
-        flowFile.assertContentEquals(Paths.
-                get("src/test/resources/CompressedData/SampleFile.txt"));
+        flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
+        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
         flowFile.assertAttributeEquals("filename", "SampleFile1.txt");
     }
 
     @Test
     public void testFilenameUpdatedOnCompress() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(CompressContent.class);
+        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
         runner.setProperty(CompressContent.MODE, "compress");
         runner.setProperty(CompressContent.COMPRESSION_FORMAT, "gzip");
-        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").
-                isValid());
+        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").isValid());
 
-        runner.enqueue(Paths.
-                get("src/test/resources/CompressedData/SampleFile.txt"));
+        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
-        MockFlowFile flowFile = runner.
-                getFlowFilesForRelationship(CompressContent.REL_SUCCESS).
-                get(0);
+        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
         flowFile.assertAttributeEquals("filename", "SampleFile.txt.gz");
 
     }
 
     @Test
     public void testDecompressFailure() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(CompressContent.class);
+        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
         runner.setProperty(CompressContent.MODE, "decompress");
         runner.setProperty(CompressContent.COMPRESSION_FORMAT, "gzip");
 
         byte[] data = new byte[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
         runner.enqueue(data);
 
-        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").
-                isValid());
+        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").isValid());
         runner.run();
         runner.assertQueueEmpty();
         runner.assertAllFlowFilesTransferred(CompressContent.REL_FAILURE, 1);
 
-        runner.getFlowFilesForRelationship(CompressContent.REL_FAILURE).
-                get(0).
-                assertContentEquals(data);
+        runner.getFlowFilesForRelationship(CompressContent.REL_FAILURE).get(0).assertContentEquals(data);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestControlRate.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestControlRate.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestControlRate.java
index dcec7b3..7729056 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestControlRate.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestControlRate.java
@@ -29,8 +29,7 @@ public class TestControlRate {
     @Test
     public void testViaAttribute() throws InterruptedException {
         final TestRunner runner = TestRunners.newTestRunner(new ControlRate());
-        runner.
-                setProperty(ControlRate.RATE_CONTROL_CRITERIA, ControlRate.ATTRIBUTE_RATE);
+        runner.setProperty(ControlRate.RATE_CONTROL_CRITERIA, ControlRate.ATTRIBUTE_RATE);
         runner.setProperty(ControlRate.RATE_CONTROL_ATTRIBUTE_NAME, "count");
         runner.setProperty(ControlRate.MAX_RATE, "20000");
         runner.setProperty(ControlRate.TIME_PERIOD, "1 sec");

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestConvertCharacterSet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestConvertCharacterSet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestConvertCharacterSet.java
index f303019..1b057d9 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestConvertCharacterSet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestConvertCharacterSet.java
@@ -29,21 +29,16 @@ public class TestConvertCharacterSet {
 
     @Test
     public void test() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ConvertCharacterSet());
+        final TestRunner runner = TestRunners.newTestRunner(new ConvertCharacterSet());
         runner.setProperty(ConvertCharacterSet.INPUT_CHARSET, "ASCII");
         runner.setProperty(ConvertCharacterSet.OUTPUT_CHARSET, "UTF-32");
 
-        runner.enqueue(Paths.
-                get("src/test/resources/CharacterSetConversionSamples/Original.txt"));
+        runner.enqueue(Paths.get("src/test/resources/CharacterSetConversionSamples/Original.txt"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ConvertCharacterSet.REL_SUCCESS, 1);
-        final MockFlowFile output = runner.
-                getFlowFilesForRelationship(ConvertCharacterSet.REL_SUCCESS).
-                get(0);
-        output.
-                assertContentEquals(new File("src/test/resources/CharacterSetConversionSamples/Converted2.txt"));
+        final MockFlowFile output = runner.getFlowFilesForRelationship(ConvertCharacterSet.REL_SUCCESS).get(0);
+        output.assertContentEquals(new File("src/test/resources/CharacterSetConversionSamples/Converted2.txt"));
     }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDetectDuplicate.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDetectDuplicate.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDetectDuplicate.java
index ef69a00..e8434f0 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDetectDuplicate.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDetectDuplicate.java
@@ -50,10 +50,8 @@ public class TestDetectDuplicate {
         System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
         System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
         System.setProperty("org.slf4j.simpleLogger.log.nifi.io.nio", "debug");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.DetectDuplicate", "debug");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.TestDetectDuplicate", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.DetectDuplicate", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.TestDetectDuplicate", "debug");
         LOGGER = LoggerFactory.getLogger(TestListenUDP.class);
     }
 
@@ -62,12 +60,10 @@ public class TestDetectDuplicate {
         TestRunner runner = TestRunners.newTestRunner(DetectDuplicate.class);
         final DistributedMapCacheClientImpl client = createClient();
         final Map<String, String> clientProperties = new HashMap<>();
-        clientProperties.
-                put(DistributedMapCacheClientService.HOSTNAME.getName(), "localhost");
+        clientProperties.put(DistributedMapCacheClientService.HOSTNAME.getName(), "localhost");
         runner.addControllerService("client", client, clientProperties);
         runner.setProperty(DetectDuplicate.DISTRIBUTED_CACHE_SERVICE, "client");
-        runner.
-                setProperty(DetectDuplicate.FLOWFILE_DESCRIPTION, "The original flow file");
+        runner.setProperty(DetectDuplicate.FLOWFILE_DESCRIPTION, "The original flow file");
         runner.setProperty(DetectDuplicate.AGE_OFF_DURATION, "48 hours");
         Map<String, String> props = new HashMap<>();
         props.put("hash.value", "1000");
@@ -75,8 +71,7 @@ public class TestDetectDuplicate {
         runner.enableControllerService(client);
 
         runner.run();
-        runner.
-                assertAllFlowFilesTransferred(DetectDuplicate.REL_NON_DUPLICATE, 1);
+        runner.assertAllFlowFilesTransferred(DetectDuplicate.REL_NON_DUPLICATE, 1);
         runner.clearTransferState();
         client.exists = true;
         runner.enqueue(new byte[]{}, props);
@@ -92,12 +87,10 @@ public class TestDetectDuplicate {
         TestRunner runner = TestRunners.newTestRunner(DetectDuplicate.class);
         final DistributedMapCacheClientImpl client = createClient();
         final Map<String, String> clientProperties = new HashMap<>();
-        clientProperties.
-                put(DistributedMapCacheClientService.HOSTNAME.getName(), "localhost");
+        clientProperties.put(DistributedMapCacheClientService.HOSTNAME.getName(), "localhost");
         runner.addControllerService("client", client, clientProperties);
         runner.setProperty(DetectDuplicate.DISTRIBUTED_CACHE_SERVICE, "client");
-        runner.
-                setProperty(DetectDuplicate.FLOWFILE_DESCRIPTION, "The original flow file");
+        runner.setProperty(DetectDuplicate.FLOWFILE_DESCRIPTION, "The original flow file");
         runner.setProperty(DetectDuplicate.AGE_OFF_DURATION, "2 secs");
         runner.enableControllerService(client);
 
@@ -106,15 +99,13 @@ public class TestDetectDuplicate {
         runner.enqueue(new byte[]{}, props);
 
         runner.run();
-        runner.
-                assertAllFlowFilesTransferred(DetectDuplicate.REL_NON_DUPLICATE, 1);
+        runner.assertAllFlowFilesTransferred(DetectDuplicate.REL_NON_DUPLICATE, 1);
         runner.clearTransferState();
         client.exists = true;
         Thread.sleep(3000);
         runner.enqueue(new byte[]{}, props);
         runner.run();
-        runner.
-                assertAllFlowFilesTransferred(DetectDuplicate.REL_NON_DUPLICATE, 1);
+        runner.assertAllFlowFilesTransferred(DetectDuplicate.REL_NON_DUPLICATE, 1);
         runner.assertTransferCount(DetectDuplicate.REL_DUPLICATE, 0);
         runner.assertTransferCount(DetectDuplicate.REL_FAILURE, 0);
     }
@@ -163,8 +154,7 @@ public class TestDetectDuplicate {
         }
 
         @Override
-        public <K, V> V getAndPutIfAbsent(K key, V value, Serializer<K> keySerializer, Serializer<V> valueSerializer,
-                Deserializer<V> valueDeserializer) throws IOException {
+        public <K, V> V getAndPutIfAbsent(K key, V value, Serializer<K> keySerializer, Serializer<V> valueSerializer, Deserializer<V> valueDeserializer) throws IOException {
             if (exists) {
                 return (V) cacheValue;
             }
@@ -212,8 +202,7 @@ public class TestDetectDuplicate {
                 }
 
                 if (child.exists()) {
-                    throw new IOException("Could not delete " + dataFile.
-                            getAbsolutePath());
+                    throw new IOException("Could not delete " + dataFile.getAbsolutePath());
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java
index dfe52bf..ac2efec 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.DistributeLoad;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
 
@@ -36,8 +35,7 @@ public class TestDistributeLoad {
 
     @Test
     public void testDefaultRoundRobin() {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new DistributeLoad());
+        final TestRunner testRunner = TestRunners.newTestRunner(new DistributeLoad());
         testRunner.setProperty(DistributeLoad.NUM_RELATIONSHIPS, "100");
 
         for (int i = 0; i < 101; i++) {
@@ -53,8 +51,7 @@ public class TestDistributeLoad {
 
     @Test
     public void testWeightedRoundRobin() {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new DistributeLoad());
+        final TestRunner testRunner = TestRunners.newTestRunner(new DistributeLoad());
         testRunner.setProperty(DistributeLoad.NUM_RELATIONSHIPS, "100");
 
         testRunner.setProperty("1", "5");
@@ -74,8 +71,7 @@ public class TestDistributeLoad {
 
     @Test
     public void testValidationOnAddedProperties() {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new DistributeLoad());
+        final TestRunner testRunner = TestRunners.newTestRunner(new DistributeLoad());
         testRunner.setProperty(DistributeLoad.NUM_RELATIONSHIPS, "100");
 
         testRunner.setProperty("1", "5");
@@ -121,13 +117,10 @@ public class TestDistributeLoad {
 
     @Test
     public void testNextAvailable() {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new DistributeLoad());
+        final TestRunner testRunner = TestRunners.newTestRunner(new DistributeLoad());
 
-        testRunner.
-                setProperty(DistributeLoad.NUM_RELATIONSHIPS.getName(), "100");
-        testRunner.
-                setProperty(DistributeLoad.DISTRIBUTION_STRATEGY.getName(), DistributeLoad.STRATEGY_NEXT_AVAILABLE);
+        testRunner.setProperty(DistributeLoad.NUM_RELATIONSHIPS.getName(), "100");
+        testRunner.setProperty(DistributeLoad.DISTRIBUTION_STRATEGY.getName(), DistributeLoad.STRATEGY_NEXT_AVAILABLE);
 
         for (int i = 0; i < 99; i++) {
             testRunner.enqueue(new byte[0]);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncodeContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncodeContent.java
index 5f6437a..fec411d 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncodeContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncodeContent.java
@@ -30,12 +30,10 @@ public class TestEncodeContent {
 
     @Test
     public void testBase64RoundTrip() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EncodeContent());
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.ENCODE_MODE);
-        testRunner.
-                setProperty(EncodeContent.ENCODING, EncodeContent.BASE64_ENCODING);
+        testRunner.setProperty(EncodeContent.ENCODING, EncodeContent.BASE64_ENCODING);
 
         testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
         testRunner.clearTransferState();
@@ -43,9 +41,7 @@ public class TestEncodeContent {
 
         testRunner.assertAllFlowFilesTransferred(EncodeContent.REL_SUCCESS, 1);
 
-        MockFlowFile flowFile = testRunner.
-                getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).
-                get(0);
+        MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).get(0);
         testRunner.assertQueueEmpty();
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.DECODE_MODE);
@@ -54,20 +50,16 @@ public class TestEncodeContent {
         testRunner.run();
         testRunner.assertAllFlowFilesTransferred(EncodeContent.REL_SUCCESS, 1);
 
-        flowFile = testRunner.
-                getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).
-                get(0);
+        flowFile = testRunner.getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).get(0);
         flowFile.assertContentEquals(new File("src/test/resources/hello.txt"));
     }
 
     @Test
     public void testFailDecodeNotBase64() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EncodeContent());
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.DECODE_MODE);
-        testRunner.
-                setProperty(EncodeContent.ENCODING, EncodeContent.BASE64_ENCODING);
+        testRunner.setProperty(EncodeContent.ENCODING, EncodeContent.BASE64_ENCODING);
 
         testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
         testRunner.clearTransferState();
@@ -78,12 +70,10 @@ public class TestEncodeContent {
 
     @Test
     public void testFailDecodeNotBase64ButIsAMultipleOfFourBytes() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EncodeContent());
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.DECODE_MODE);
-        testRunner.
-                setProperty(EncodeContent.ENCODING, EncodeContent.BASE64_ENCODING);
+        testRunner.setProperty(EncodeContent.ENCODING, EncodeContent.BASE64_ENCODING);
 
         testRunner.enqueue("four@@@@multiple".getBytes());
         testRunner.clearTransferState();
@@ -94,12 +84,10 @@ public class TestEncodeContent {
 
     @Test
     public void testBase32RoundTrip() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EncodeContent());
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.ENCODE_MODE);
-        testRunner.
-                setProperty(EncodeContent.ENCODING, EncodeContent.BASE32_ENCODING);
+        testRunner.setProperty(EncodeContent.ENCODING, EncodeContent.BASE32_ENCODING);
 
         testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
         testRunner.clearTransferState();
@@ -107,9 +95,7 @@ public class TestEncodeContent {
 
         testRunner.assertAllFlowFilesTransferred(EncodeContent.REL_SUCCESS, 1);
 
-        MockFlowFile flowFile = testRunner.
-                getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).
-                get(0);
+        MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).get(0);
         testRunner.assertQueueEmpty();
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.DECODE_MODE);
@@ -118,20 +104,16 @@ public class TestEncodeContent {
         testRunner.run();
         testRunner.assertAllFlowFilesTransferred(EncodeContent.REL_SUCCESS, 1);
 
-        flowFile = testRunner.
-                getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).
-                get(0);
+        flowFile = testRunner.getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).get(0);
         flowFile.assertContentEquals(new File("src/test/resources/hello.txt"));
     }
 
     @Test
     public void testFailDecodeNotBase32() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EncodeContent());
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.DECODE_MODE);
-        testRunner.
-                setProperty(EncodeContent.ENCODING, EncodeContent.BASE32_ENCODING);
+        testRunner.setProperty(EncodeContent.ENCODING, EncodeContent.BASE32_ENCODING);
 
         testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
         testRunner.clearTransferState();
@@ -142,12 +124,10 @@ public class TestEncodeContent {
 
     @Test
     public void testHexRoundTrip() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EncodeContent());
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.ENCODE_MODE);
-        testRunner.
-                setProperty(EncodeContent.ENCODING, EncodeContent.HEX_ENCODING);
+        testRunner.setProperty(EncodeContent.ENCODING, EncodeContent.HEX_ENCODING);
 
         testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
         testRunner.clearTransferState();
@@ -155,9 +135,7 @@ public class TestEncodeContent {
 
         testRunner.assertAllFlowFilesTransferred(EncodeContent.REL_SUCCESS, 1);
 
-        MockFlowFile flowFile = testRunner.
-                getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).
-                get(0);
+        MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).get(0);
         testRunner.assertQueueEmpty();
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.DECODE_MODE);
@@ -166,20 +144,16 @@ public class TestEncodeContent {
         testRunner.run();
         testRunner.assertAllFlowFilesTransferred(EncodeContent.REL_SUCCESS, 1);
 
-        flowFile = testRunner.
-                getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).
-                get(0);
+        flowFile = testRunner.getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).get(0);
         flowFile.assertContentEquals(new File("src/test/resources/hello.txt"));
     }
 
     @Test
     public void testFailDecodeNotHex() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EncodeContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EncodeContent());
 
         testRunner.setProperty(EncodeContent.MODE, EncodeContent.DECODE_MODE);
-        testRunner.
-                setProperty(EncodeContent.ENCODING, EncodeContent.HEX_ENCODING);
+        testRunner.setProperty(EncodeContent.ENCODING, EncodeContent.HEX_ENCODING);
 
         testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
         testRunner.clearTransferState();
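
The hunks above all reduce to the same nifi-mock round-trip pattern: configure EncodeContent to encode, run, capture the FlowFile routed to success, then switch the same runner to decode and assert that the content matches the original fixture. As a condensed recap, a minimal sketch of that pattern for Base64 (using only calls that appear in this diff; the mock classes are assumed to live in the usual org.apache.nifi.util package) could look like:

    import java.io.File;
    import java.nio.file.Paths;

    import org.apache.nifi.processors.standard.EncodeContent;
    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class EncodeContentRoundTripSketch {

        @Test
        public void base64RoundTrip() throws Exception {
            final TestRunner runner = TestRunners.newTestRunner(new EncodeContent());

            // Encode the fixture file.
            runner.setProperty(EncodeContent.MODE, EncodeContent.ENCODE_MODE);
            runner.setProperty(EncodeContent.ENCODING, EncodeContent.BASE64_ENCODING);
            runner.enqueue(Paths.get("src/test/resources/hello.txt"));
            runner.run();
            runner.assertAllFlowFilesTransferred(EncodeContent.REL_SUCCESS, 1);
            final MockFlowFile encoded = runner.getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).get(0);

            // Decode the encoded FlowFile and confirm the original bytes come back.
            runner.clearTransferState();
            runner.setProperty(EncodeContent.MODE, EncodeContent.DECODE_MODE);
            runner.enqueue(encoded);
            runner.run();
            runner.assertAllFlowFilesTransferred(EncodeContent.REL_SUCCESS, 1);
            runner.getFlowFilesForRelationship(EncodeContent.REL_SUCCESS).get(0)
                    .assertContentEquals(new File("src/test/resources/hello.txt"));
        }
    }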

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncryptContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncryptContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncryptContent.java
index 1ec1fc9..7340e0f 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncryptContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEncryptContent.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.EncryptContent;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Paths;
@@ -32,44 +31,33 @@ public class TestEncryptContent {
 
     @Test
     public void testRoundTrip() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EncryptContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EncryptContent());
         testRunner.setProperty(EncryptContent.PASSWORD, "Hello, World!");
 
         for (final EncryptionMethod method : EncryptionMethod.values()) {
             if (method.isUnlimitedStrength()) {
                 continue;   // cannot test unlimited strength in unit tests because it's not enabled by the JVM by default.
             }
-            testRunner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, method.
-                    name());
-            testRunner.
-                    setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE);
+            testRunner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, method.name());
+            testRunner.setProperty(EncryptContent.MODE, EncryptContent.ENCRYPT_MODE);
 
             testRunner.enqueue(Paths.get("src/test/resources/hello.txt"));
             testRunner.clearTransferState();
             testRunner.run();
 
-            testRunner.
-                    assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1);
+            testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1);
 
-            MockFlowFile flowFile = testRunner.
-                    getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).
-                    get(0);
+            MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0);
             testRunner.assertQueueEmpty();
 
-            testRunner.
-                    setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE);
+            testRunner.setProperty(EncryptContent.MODE, EncryptContent.DECRYPT_MODE);
             testRunner.enqueue(flowFile);
             testRunner.clearTransferState();
             testRunner.run();
-            testRunner.
-                    assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1);
+            testRunner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1);
 
-            flowFile = testRunner.
-                    getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).
-                    get(0);
-            flowFile.
-                    assertContentEquals(new File("src/test/resources/hello.txt"));
+            flowFile = testRunner.getFlowFilesForRelationship(EncryptContent.REL_SUCCESS).get(0);
+            flowFile.assertContentEquals(new File("src/test/resources/hello.txt"));
         }
     }
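
The loop above skips any EncryptionMethod marked unlimited-strength because a stock JVM ships with the limited JCE policy, as the inline comment notes. For reference only (this helper is not part of the commit), the JDK-level check behind that guard can be written as:

    import java.security.NoSuchAlgorithmException;

    import javax.crypto.Cipher;

    public final class JcePolicyCheck {

        private JcePolicyCheck() {
        }

        /**
         * Returns true when the JVM's JCE policy permits AES keys longer than 128 bits,
         * i.e. when the "unlimited strength" jurisdiction policy files are installed.
         */
        public static boolean isUnlimitedStrengthAvailable() {
            try {
                return Cipher.getMaxAllowedKeyLength("AES") > 128;
            } catch (final NoSuchAlgorithmException e) {
                return false;
            }
        }
    }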
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateJsonPath.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateJsonPath.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateJsonPath.java
index 25dfc1b..69d47c8 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateJsonPath.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateJsonPath.java
@@ -38,77 +38,59 @@ import static org.junit.Assert.assertEquals;
 
 public class TestEvaluateJsonPath {
 
-    private static final Path JSON_SNIPPET = Paths.
-            get("src/test/resources/TestJson/json-sample.json");
-    private static final Path XML_SNIPPET = Paths.
-            get("src/test/resources/TestXml/xml-snippet.xml");
+    private static final Path JSON_SNIPPET = Paths.get("src/test/resources/TestJson/json-sample.json");
+    private static final Path XML_SNIPPET = Paths.get("src/test/resources/TestXml/xml-snippet.xml");
 
     @Test(expected = AssertionError.class)
     public void testInvalidJsonPath() {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("invalid.jsonPath", "$..");
 
-        Assert.
-                fail("An improper JsonPath expression was not detected as being invalid.");
+        Assert.fail("An improper JsonPath expression was not detected as being invalid.");
     }
 
     @Test
     public void testInvalidJsonDocument() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
-        testRunner.
-                assertAllFlowFilesTransferred(EvaluateJsonPath.REL_FAILURE, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateJsonPath.REL_FAILURE).
-                get(0);
+        testRunner.assertAllFlowFilesTransferred(EvaluateJsonPath.REL_FAILURE, 1);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateJsonPath.REL_FAILURE).get(0);
     }
 
     @Test(expected = AssertionError.class)
     public void testInvalidConfiguration_destinationContent_twoPaths() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
         testRunner.setProperty("JsonPath1", "$[0]._id");
         testRunner.setProperty("JsonPath2", "$[0].name");
 
         testRunner.enqueue(JSON_SNIPPET);
         testRunner.run();
 
-        Assert.
-                fail("Processor incorrectly ran with an invalid configuration of multiple paths specified as attributes for a destination of content.");
+        Assert.fail("Processor incorrectly ran with an invalid configuration of multiple paths specified as attributes for a destination of content.");
     }
 
     @Test(expected = AssertionError.class)
     public void testInvalidConfiguration_invalidJsonPath_space() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
         testRunner.setProperty("JsonPath1", "$[0]. _id");
 
         testRunner.enqueue(JSON_SNIPPET);
         testRunner.run();
 
-        Assert.
-                fail("Processor incorrectly ran with an invalid configuration of multiple paths specified as attributes for a destination of content.");
+        Assert.fail("Processor incorrectly ran with an invalid configuration of multiple paths specified as attributes for a destination of content.");
     }
 
     @Test
     public void testConfiguration_destinationAttributes_twoPaths() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("JsonPath1", "$[0]._id");
         testRunner.setProperty("JsonPath2", "$[0].name");
 
@@ -120,10 +102,8 @@ public class TestEvaluateJsonPath {
     public void testExtractPath_destinationAttribute() throws Exception {
         String jsonPathAttrKey = "JsonPath";
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
         testRunner.setProperty(jsonPathAttrKey, "$[0]._id");
 
         testRunner.enqueue(JSON_SNIPPET);
@@ -132,22 +112,15 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_MATCH;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(expectedRel).
-                get(0);
-        Assert.
-                assertEquals("Transferred flow file did not have the correct result", "54df94072d5dbf7dc6340cc5", out.
-                        getAttribute(jsonPathAttrKey));
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(expectedRel).get(0);
+        Assert.assertEquals("Transferred flow file did not have the correct result", "54df94072d5dbf7dc6340cc5", out.getAttribute(jsonPathAttrKey));
     }
 
     @Test
     public void testExtractPath_destinationAttributes_twoPaths() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
-        testRunner.
-                setProperty(EvaluateJsonPath.RETURN_TYPE, EvaluateJsonPath.RETURN_TYPE_JSON);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        testRunner.setProperty(EvaluateJsonPath.RETURN_TYPE, EvaluateJsonPath.RETURN_TYPE_JSON);
 
         String jsonPathIdAttrKey = "evaluatejson.id";
         String jsonPathNameAttrKey = "evaluatejson.name";
@@ -161,23 +134,15 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_MATCH;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(expectedRel).
-                get(0);
-        Assert.
-                assertEquals("Transferred flow file did not have the correct result for id attribute", "54df94072d5dbf7dc6340cc5", out.
-                        getAttribute(jsonPathIdAttrKey));
-        Assert.
-                assertEquals("Transferred flow file did not have the correct result for name attribute", "{\"first\":\"Shaffer\",\"last\":\"Pearson\"}", out.
-                        getAttribute(jsonPathNameAttrKey));
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(expectedRel).get(0);
+        Assert.assertEquals("Transferred flow file did not have the correct result for id attribute", "54df94072d5dbf7dc6340cc5", out.getAttribute(jsonPathIdAttrKey));
+        Assert.assertEquals("Transferred flow file did not have the correct result for name attribute", "{\"first\":\"Shaffer\",\"last\":\"Pearson\"}", out.getAttribute(jsonPathNameAttrKey));
     }
 
     @Test
     public void testExtractPath_destinationAttributes_twoPaths_notFound() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
 
         String jsonPathIdAttrKey = "evaluatejson.id";
         String jsonPathNameAttrKey = "evaluatejson.name";
@@ -191,23 +156,15 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_MATCH;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(expectedRel).
-                get(0);
-        Assert.
-                assertEquals("Transferred flow file did not have the correct result for id attribute", "", out.
-                        getAttribute(jsonPathIdAttrKey));
-        Assert.
-                assertEquals("Transferred flow file did not have the correct result for name attribute", "", out.
-                        getAttribute(jsonPathNameAttrKey));
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(expectedRel).get(0);
+        Assert.assertEquals("Transferred flow file did not have the correct result for id attribute", "", out.getAttribute(jsonPathIdAttrKey));
+        Assert.assertEquals("Transferred flow file did not have the correct result for name attribute", "", out.getAttribute(jsonPathNameAttrKey));
     }
 
     @Test
     public void testExtractPath_destinationAttributes_twoPaths_oneFound() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
 
         String jsonPathIdAttrKey = "evaluatejson.id";
         String jsonPathNameAttrKey = "evaluatejson.name";
@@ -221,25 +178,17 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_MATCH;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(expectedRel).
-                get(0);
-        Assert.
-                assertEquals("Transferred flow file did not have the correct result for id attribute", "54df94072d5dbf7dc6340cc5", out.
-                        getAttribute(jsonPathIdAttrKey));
-        Assert.
-                assertEquals("Transferred flow file did not have the correct result for name attribute", StringUtils.EMPTY, out.
-                        getAttribute(jsonPathNameAttrKey));
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(expectedRel).get(0);
+        Assert.assertEquals("Transferred flow file did not have the correct result for id attribute", "54df94072d5dbf7dc6340cc5", out.getAttribute(jsonPathIdAttrKey));
+        Assert.assertEquals("Transferred flow file did not have the correct result for name attribute", StringUtils.EMPTY, out.getAttribute(jsonPathNameAttrKey));
     }
 
     @Test
     public void testExtractPath_destinationContent() throws Exception {
         String jsonPathAttrKey = "JsonPath";
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
         testRunner.setProperty(jsonPathAttrKey, "$[0]._id");
 
         testRunner.enqueue(JSON_SNIPPET);
@@ -248,19 +197,15 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_MATCH;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        testRunner.getFlowFilesForRelationship(expectedRel).
-                get(0).
-                assertContentEquals("54df94072d5dbf7dc6340cc5");
+        testRunner.getFlowFilesForRelationship(expectedRel).get(0).assertContentEquals("54df94072d5dbf7dc6340cc5");
     }
 
     @Test
     public void testExtractPath_destinationContent_indefiniteResult() throws Exception {
         String jsonPathAttrKey = "friends.indefinite.id.list";
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
         testRunner.setProperty(jsonPathAttrKey, "$[0].friends.[*].id");
 
         testRunner.enqueue(JSON_SNIPPET);
@@ -269,19 +214,15 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_MATCH;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        testRunner.getFlowFilesForRelationship(expectedRel).
-                get(0).
-                assertContentEquals("[0,1,2]");
+        testRunner.getFlowFilesForRelationship(expectedRel).get(0).assertContentEquals("[0,1,2]");
     }
 
     @Test
     public void testExtractPath_destinationContent_indefiniteResult_operators() throws Exception {
         String jsonPathAttrKey = "friends.indefinite.id.list";
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
         testRunner.setProperty(jsonPathAttrKey, "$[0].friends[?(@.id < 3)].id");
 
         testRunner.enqueue(JSON_SNIPPET);
@@ -290,17 +231,13 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_MATCH;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        testRunner.getFlowFilesForRelationship(expectedRel).
-                get(0).
-                assertContentEquals("[0,1,2]");
+        testRunner.getFlowFilesForRelationship(expectedRel).get(0).assertContentEquals("[0,1,2]");
     }
 
     @Test
     public void testRouteUnmatched_destinationContent_noMatch() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
         testRunner.setProperty("jsonPath", "$[0].nonexistent.key");
 
         testRunner.enqueue(JSON_SNIPPET);
@@ -309,21 +246,16 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_NO_MATCH;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        testRunner.getFlowFilesForRelationship(expectedRel).
-                get(0).
-                assertContentEquals(JSON_SNIPPET);
+        testRunner.getFlowFilesForRelationship(expectedRel).get(0).assertContentEquals(JSON_SNIPPET);
     }
 
     @Test
     public void testRouteFailure_returnTypeScalar_resultArray() throws Exception {
         String jsonPathAttrKey = "friends.indefinite.id.list";
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.RETURN_TYPE, EvaluateJsonPath.RETURN_TYPE_SCALAR);
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.RETURN_TYPE, EvaluateJsonPath.RETURN_TYPE_SCALAR);
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_CONTENT);
         testRunner.setProperty(jsonPathAttrKey, "$[0].friends[?(@.id < 3)].id");
 
         testRunner.enqueue(JSON_SNIPPET);
@@ -332,34 +264,26 @@ public class TestEvaluateJsonPath {
         Relationship expectedRel = EvaluateJsonPath.REL_FAILURE;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        testRunner.getFlowFilesForRelationship(expectedRel).
-                get(0).
-                assertContentEquals(JSON_SNIPPET);
+        testRunner.getFlowFilesForRelationship(expectedRel).get(0).assertContentEquals(JSON_SNIPPET);
     }
 
     @Test
     public void testNullInput() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.RETURN_TYPE, EvaluateJsonPath.RETURN_TYPE_JSON);
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.RETURN_TYPE, EvaluateJsonPath.RETURN_TYPE_JSON);
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("stringField", "$.stringField");
         testRunner.setProperty("missingField", "$.missingField");
         testRunner.setProperty("nullField", "$.nullField");
 
-        ProcessSession session = testRunner.getProcessSessionFactory().
-                createSession();
+        ProcessSession session = testRunner.getProcessSessionFactory().createSession();
         FlowFile ff = session.create();
 
         ff = session.write(ff, new OutputStreamCallback() {
             @Override
             public void process(OutputStream out) throws IOException {
                 try (OutputStream outputStream = new BufferedOutputStream(out)) {
-                    outputStream.
-                            write("{\"stringField\": \"String Value\", \"nullField\": null}".
-                                    getBytes(StandardCharsets.UTF_8));
+                    outputStream.write("{\"stringField\": \"String Value\", \"nullField\": null}".getBytes(StandardCharsets.UTF_8));
                 }
             }
         });
@@ -369,9 +293,7 @@ public class TestEvaluateJsonPath {
 
         testRunner.assertTransferCount(EvaluateJsonPath.REL_MATCH, 1);
 
-        FlowFile output = testRunner.
-                getFlowFilesForRelationship(EvaluateJsonPath.REL_MATCH).
-                get(0);
+        FlowFile output = testRunner.getFlowFilesForRelationship(EvaluateJsonPath.REL_MATCH).get(0);
 
         String validFieldValue = output.getAttribute("stringField");
         assertEquals("String Value", validFieldValue);
@@ -385,29 +307,22 @@ public class TestEvaluateJsonPath {
 
     @Test
     public void testNullInput_nullStringRepresentation() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateJsonPath());
-        testRunner.
-                setProperty(EvaluateJsonPath.RETURN_TYPE, EvaluateJsonPath.RETURN_TYPE_JSON);
-        testRunner.
-                setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
-        testRunner.
-                setProperty(EvaluateJsonPath.NULL_VALUE_DEFAULT_REPRESENTATION, AbstractJsonPathProcessor.NULL_STRING_OPTION);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateJsonPath());
+        testRunner.setProperty(EvaluateJsonPath.RETURN_TYPE, EvaluateJsonPath.RETURN_TYPE_JSON);
+        testRunner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
+        testRunner.setProperty(EvaluateJsonPath.NULL_VALUE_DEFAULT_REPRESENTATION, AbstractJsonPathProcessor.NULL_STRING_OPTION);
         testRunner.setProperty("stringField", "$.stringField");
         testRunner.setProperty("missingField", "$.missingField");
         testRunner.setProperty("nullField", "$.nullField");
 
-        ProcessSession session = testRunner.getProcessSessionFactory().
-                createSession();
+        ProcessSession session = testRunner.getProcessSessionFactory().createSession();
         FlowFile ff = session.create();
 
         ff = session.write(ff, new OutputStreamCallback() {
             @Override
             public void process(OutputStream out) throws IOException {
                 try (OutputStream outputStream = new BufferedOutputStream(out)) {
-                    outputStream.
-                            write("{\"stringField\": \"String Value\", \"nullField\": null}".
-                                    getBytes(StandardCharsets.UTF_8));
+                    outputStream.write("{\"stringField\": \"String Value\", \"nullField\": null}".getBytes(StandardCharsets.UTF_8));
                 }
             }
         });
@@ -417,9 +332,7 @@ public class TestEvaluateJsonPath {
 
         testRunner.assertTransferCount(EvaluateJsonPath.REL_MATCH, 1);
 
-        FlowFile output = testRunner.
-                getFlowFilesForRelationship(EvaluateJsonPath.REL_MATCH).
-                get(0);
+        FlowFile output = testRunner.getFlowFilesForRelationship(EvaluateJsonPath.REL_MATCH).get(0);
 
         String validFieldValue = output.getAttribute("stringField");
         assertEquals("String Value", validFieldValue);
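
The EvaluateJsonPath tests above all use the processor's dynamic-property convention: each user-added property name becomes an attribute key (or, for a single property, the new content), and the property value holds the JsonPath expression. The json-sample.json fixture itself is not reproduced in this mail, so as an illustration only, here is the same attribute-destination pattern driven by an in-memory document shaped so that the expressions used above resolve; every mock-framework call is one that already appears in this diff:

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.processors.standard.EvaluateJsonPath;
    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Assert;
    import org.junit.Test;

    public class EvaluateJsonPathInlineSketch {

        @Test
        public void extractIdToAttributeFromInlineJson() {
            final TestRunner runner = TestRunners.newTestRunner(new EvaluateJsonPath());
            runner.setProperty(EvaluateJsonPath.DESTINATION, EvaluateJsonPath.DESTINATION_ATTRIBUTE);
            // Dynamic property: the name becomes the attribute key, the value is the JsonPath.
            runner.setProperty("json.id", "$[0]._id");

            // Stand-in for the json-sample.json fixture: an array whose first element has _id and name.
            final String json = "[{\"_id\": \"54df94072d5dbf7dc6340cc5\", \"name\": {\"first\": \"Shaffer\", \"last\": \"Pearson\"}}]";
            runner.enqueue(json.getBytes(StandardCharsets.UTF_8));
            runner.run();

            runner.assertAllFlowFilesTransferred(EvaluateJsonPath.REL_MATCH, 1);
            final MockFlowFile out = runner.getFlowFilesForRelationship(EvaluateJsonPath.REL_MATCH).get(0);
            Assert.assertEquals("54df94072d5dbf7dc6340cc5", out.getAttribute("json.id"));
        }
    }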

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXPath.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXPath.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXPath.java
index b88b9f9..95e475f 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXPath.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXPath.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.EvaluateXPath;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
@@ -33,81 +32,60 @@ import org.junit.Test;
 
 public class TestEvaluateXPath {
 
-    private static final Path XML_SNIPPET = Paths.
-            get("src/test/resources/TestXml/xml-snippet.xml");
+    private static final Path XML_SNIPPET = Paths.get("src/test/resources/TestXml/xml-snippet.xml");
 
     @Test
     public void testAsAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXPath());
-        testRunner.
-                setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXPath());
+        testRunner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("xpath.result1", "/");
-        testRunner.
-                setProperty("xpath.result2", "/*:bundle/node/subNode/value/text()");
+        testRunner.setProperty("xpath.result2", "/*:bundle/node/subNode/value/text()");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXPath.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).get(0);
         out.assertAttributeEquals("xpath.result2", "Hello");
-        assertTrue(out.getAttribute("xpath.result1").
-                contains("Hello"));
+        assertTrue(out.getAttribute("xpath.result1").contains("Hello"));
     }
 
     @Test
     public void testCheckIfElementExists() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXPath());
-        testRunner.
-                setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXPath());
+        testRunner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("xpath.result1", "/");
-        testRunner.
-                setProperty("xpath.result.exist.1", "boolean(/*:bundle/node)");
-        testRunner.
-                setProperty("xpath.result.exist.2", "boolean(/*:bundle/node2)");
+        testRunner.setProperty("xpath.result.exist.1", "boolean(/*:bundle/node)");
+        testRunner.setProperty("xpath.result.exist.2", "boolean(/*:bundle/node2)");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXPath.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).
-                get(0);
-        assertTrue(out.getAttribute("xpath.result1").
-                contains("Hello"));
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).get(0);
+        assertTrue(out.getAttribute("xpath.result1").contains("Hello"));
         out.assertAttributeEquals("xpath.result.exist.1", "true");
         out.assertAttributeEquals("xpath.result.exist.2", "false");
     }
 
     @Test
     public void testUnmatched() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXPath());
-        testRunner.
-                setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXPath());
+        testRunner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
         testRunner.setProperty("xpath.result.exist.2", "/*:bundle/node2");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXPath.REL_NO_MATCH, 1);
-        testRunner.getFlowFilesForRelationship(EvaluateXPath.REL_NO_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXPath.REL_NO_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test(expected = java.lang.AssertionError.class)
     public void testMultipleXPathForContent() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXPath());
-        testRunner.
-                setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
-        testRunner.
-                setProperty(EvaluateXPath.RETURN_TYPE, EvaluateXPath.RETURN_TYPE_AUTO);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXPath());
+        testRunner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
+        testRunner.setProperty(EvaluateXPath.RETURN_TYPE, EvaluateXPath.RETURN_TYPE_AUTO);
         testRunner.setProperty("some.property.1", "/*:bundle/node/subNode[1]");
         testRunner.setProperty("some.property.2", "/*:bundle/node/subNode[2]");
 
@@ -117,19 +95,15 @@ public class TestEvaluateXPath {
 
     @Test
     public void testWriteToContent() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXPath());
-        testRunner.
-                setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXPath());
+        testRunner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
         testRunner.setProperty("some.property", "/*:bundle/node/subNode[1]");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXPath.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).get(0);
         final byte[] outData = testRunner.getContentAsByteArray(out);
         final String outXml = new String(outData, "UTF-8");
         assertTrue(outXml.contains("subNode"));
@@ -138,10 +112,8 @@ public class TestEvaluateXPath {
 
     @Test
     public void testFailureIfContentMatchesMultipleNodes() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXPath());
-        testRunner.
-                setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXPath());
+        testRunner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
         testRunner.setProperty("some.property", "/*:bundle/node/subNode");
 
         testRunner.enqueue(XML_SNIPPET);
@@ -152,45 +124,33 @@ public class TestEvaluateXPath {
 
     @Test
     public void testWriteStringToContent() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXPath());
-        testRunner.
-                setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
-        testRunner.
-                setProperty(EvaluateXPath.RETURN_TYPE, EvaluateXPath.RETURN_TYPE_STRING);
-        testRunner.
-                setProperty("some.property", "/*:bundle/node/subNode[1]/value/text()");
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXPath());
+        testRunner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_CONTENT);
+        testRunner.setProperty(EvaluateXPath.RETURN_TYPE, EvaluateXPath.RETURN_TYPE_STRING);
+        testRunner.setProperty("some.property", "/*:bundle/node/subNode[1]/value/text()");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXPath.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).get(0);
         final byte[] outData = testRunner.getContentAsByteArray(out);
         final String outXml = new String(outData, "UTF-8");
-        assertTrue(outXml.trim().
-                equals("Hello"));
+        assertTrue(outXml.trim().equals("Hello"));
     }
 
     @Test
     public void testWriteNodeSetToAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXPath());
-        testRunner.
-                setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_ATTRIBUTE);
-        testRunner.
-                setProperty(EvaluateXPath.RETURN_TYPE, EvaluateXPath.RETURN_TYPE_NODESET);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXPath());
+        testRunner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_ATTRIBUTE);
+        testRunner.setProperty(EvaluateXPath.RETURN_TYPE, EvaluateXPath.RETURN_TYPE_NODESET);
         testRunner.setProperty("some.property", "/*:bundle/node/subNode[1]");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXPath.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).get(0);
         final String outXml = out.getAttribute("some.property");
         assertTrue(outXml.contains("subNode"));
         assertTrue(outXml.contains("Hello"));
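
TestEvaluateXPath follows the same dynamic-property convention, with XPath expressions instead of JsonPath. The xml-snippet.xml fixture is likewise not included in this mail, so the sketch below is only an illustration that gives the expression something to match (the *: wildcard on the root step is copied from the expressions in the committed tests):

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.processors.standard.EvaluateXPath;
    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class EvaluateXPathInlineSketch {

        @Test
        public void extractValueToAttributeFromInlineXml() {
            final TestRunner runner = TestRunners.newTestRunner(new EvaluateXPath());
            runner.setProperty(EvaluateXPath.DESTINATION, EvaluateXPath.DESTINATION_ATTRIBUTE);
            // Dynamic property: attribute name -> XPath expression, mirroring the tests above.
            runner.setProperty("xpath.greeting", "/*:bundle/node/subNode/value/text()");

            // Minimal stand-in document: a namespaced root with un-namespaced children.
            final String xml = "<b:bundle xmlns:b=\"urn:example\"><node><subNode><value>Hello</value></subNode></node></b:bundle>";
            runner.enqueue(xml.getBytes(StandardCharsets.UTF_8));
            runner.run();

            runner.assertAllFlowFilesTransferred(EvaluateXPath.REL_MATCH, 1);
            final MockFlowFile out = runner.getFlowFilesForRelationship(EvaluateXPath.REL_MATCH).get(0);
            out.assertAttributeEquals("xpath.greeting", "Hello");
        }
    }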


[41/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/525ce7fb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/525ce7fb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/525ce7fb

Branch: refs/heads/NIFI-292
Commit: 525ce7fb2db5c039c3b5d1c36085953f6ca9af10
Parents: e1160f5
Author: joewitt <jo...@apache.org>
Authored: Tue Apr 28 00:32:13 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Tue Apr 28 00:32:13 2015 -0400

----------------------------------------------------------------------
 .../flowfile/attributes/CoreAttributes.java     |  12 +-
 .../apache/nifi/remote/VersionNegotiator.java   |  15 +--
 .../TransmissionDisabledException.java          |   3 +-
 .../nifi/remote/io/CompressionOutputStream.java |   3 +-
 .../remote/io/socket/BufferStateManager.java    |   4 +-
 .../socket/ssl/SSLSocketChannelInputStream.java |   3 +-
 .../ssl/SSLSocketChannelOutputStream.java       |   3 +-
 .../nifi/stream/io/BufferedInputStream.java     |   7 +-
 .../nifi/stream/io/BufferedOutputStream.java    |  31 ++---
 .../nifi/stream/io/ByteArrayInputStream.java    | 113 ++++++------------
 .../nifi/stream/io/ByteArrayOutputStream.java   |  80 +++++--------
 .../stream/io/ByteCountingOutputStream.java     |   5 +-
 .../apache/nifi/stream/io/DataOutputStream.java | 113 ++++++------------
 .../apache/nifi/stream/io/GZIPOutputStream.java |   4 +-
 .../stream/io/LeakyBucketStreamThrottler.java   |   3 +-
 .../stream/io/MinimumLengthInputStream.java     |   3 +-
 .../nifi/stream/io/NonCloseableInputStream.java |   5 +-
 .../org/apache/nifi/stream/io/StreamUtils.java  |  30 ++---
 .../apache/nifi/stream/io/ZipOutputStream.java  |   5 +-
 .../java/org/apache/nifi/util/EscapeUtils.java  |   4 +-
 .../java/org/apache/nifi/util/LongHolder.java   |   6 +-
 .../apache/nifi/util/NaiveSearchRingBuffer.java |  21 ++--
 .../java/org/apache/nifi/util/RingBuffer.java   |  26 ++---
 .../java/org/apache/nifi/util/StopWatch.java    |   3 +-
 .../org/apache/nifi/util/file/FileUtils.java    | 114 ++++++-------------
 .../file/monitor/CompoundUpdateMonitor.java     |   7 +-
 .../file/monitor/SynchronousFileWatcher.java    |   6 +-
 .../org/apache/nifi/util/search/Search.java     |  18 +--
 .../org/apache/nifi/util/search/SearchTerm.java |   5 +-
 29 files changed, 203 insertions(+), 449 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/flowfile/attributes/CoreAttributes.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/flowfile/attributes/CoreAttributes.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/flowfile/attributes/CoreAttributes.java
index b0f4048..9b4c3af 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/flowfile/attributes/CoreAttributes.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/flowfile/attributes/CoreAttributes.java
@@ -19,18 +19,15 @@ package org.apache.nifi.flowfile.attributes;
 public enum CoreAttributes implements FlowFileAttributeKey {
 
     /**
-     * The flowfile's path indicates the relative directory to which a FlowFile
-     * belongs and does not contain the filename
+     * The flowfile's path indicates the relative directory to which a FlowFile belongs and does not contain the filename
      */
     PATH("path"),
     /**
-     * The flowfile's absolute path indicates the absolute directory to which a
-     * FlowFile belongs and does not contain the filename
+     * The flowfile's absolute path indicates the absolute directory to which a FlowFile belongs and does not contain the filename
      */
     ABSOLUTE_PATH("absolute.path"),
     /**
-     * The filename of the FlowFile. The filename should not contain any
-     * directory structure.
+     * The filename of the FlowFile. The filename should not contain any directory structure.
      */
     FILENAME("filename"),
     /**
@@ -50,8 +47,7 @@ public enum CoreAttributes implements FlowFileAttributeKey {
      */
     DISCARD_REASON("discard.reason"),
     /**
-     * Indicates an identifier other than the FlowFile's UUID that is known to
-     * refer to this FlowFile.
+     * Indicates an identifier other than the FlowFile's UUID that is known to refer to this FlowFile.
      */
     ALTERNATE_IDENTIFIER("alternate.identifier");
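
CoreAttributes enumerates the well-known FlowFile attribute keys ("path", "absolute.path", "filename", and so on). As a usage illustration only (the key() accessor comes from the FlowFileAttributeKey interface the enum implements and is not shown in this hunk), the "path"/"filename" relationship described in the reflowed Javadoc can be exercised like this:

    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.flowfile.attributes.CoreAttributes;

    public final class CoreAttributesSketch {

        private CoreAttributesSketch() {
        }

        /**
         * Rebuilds a FlowFile's relative location from its core attributes:
         * "path" holds the directory portion and "filename" the name, so the two
         * are combined here. Assumes both attributes are populated.
         */
        public static String relativeLocation(final FlowFile flowFile) {
            final String directory = flowFile.getAttribute(CoreAttributes.PATH.key());
            final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());
            return directory.endsWith("/") ? directory + filename : directory + "/" + filename;
        }
    }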
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/VersionNegotiator.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/VersionNegotiator.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/VersionNegotiator.java
index a05ea9d..d8ee27a 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/VersionNegotiator.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/VersionNegotiator.java
@@ -26,13 +26,10 @@ public interface VersionNegotiator {
     int getVersion();
 
     /**
-     * Sets the version of this resource to the specified version. Only the
-     * lower byte of the version is relevant.
+     * Sets the version of this resource to the specified version. Only the lower byte of the version is relevant.
      *
      * @param version the version to set
-     * @throws IllegalArgumentException if the given Version is not supported by
-     * this resource, as is indicated by the {@link #isVersionSupported(int)}
-     * method
+     * @throws IllegalArgumentException if the given Version is not supported by this resource, as is indicated by the {@link #isVersionSupported(int)} method
      */
     void setVersion(int version) throws IllegalArgumentException;
 
@@ -43,9 +40,8 @@ public interface VersionNegotiator {
     int getPreferredVersion();
 
     /**
-     * Gets the preferred version of this resource that is no greater than the
-     * given maxVersion. If no acceptable version exists that is less than
-     * <code>maxVersion</code>, then <code>null</code> is returned
+     * Gets the preferred version of this resource that is no greater than the given maxVersion. If no acceptable version exists that is less than <code>maxVersion</code>, then <code>null</code> is
+     * returned
      *
      * @param maxVersion the maximum version desired
      * @return the preferred version if found; null otherwise
@@ -53,8 +49,7 @@ public interface VersionNegotiator {
     Integer getPreferredVersion(int maxVersion);
 
     /**
-     * Indicates whether or not the specified version is supported by this
-     * resource
+     * Indicates whether or not the specified version is supported by this resource
      *
      * @param version the version to test
      * @return true if supported; false otherwise
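
The reflowed Javadoc above spells out the VersionNegotiator contract: setVersion rejects unsupported values, getPreferredVersion() returns the most preferred version, and getPreferredVersion(maxVersion) returns the best supported version not exceeding the cap, or null. Purely as an illustration of that contract (this is not the project's own implementation, and it assumes the methods quoted in this diff make up the whole interface), a simple negotiator backed by a descending list of supported versions could look like:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    import org.apache.nifi.remote.VersionNegotiator;

    public class SimpleVersionNegotiator implements VersionNegotiator {

        private final List<Integer> supported; // sorted highest-first; the first entry is the preferred version
        private int version;

        public SimpleVersionNegotiator(final Integer... supportedVersions) {
            final List<Integer> versions = Arrays.asList(supportedVersions.clone());
            Collections.sort(versions, Collections.reverseOrder());
            this.supported = Collections.unmodifiableList(versions);
            this.version = versions.get(0); // default to the most preferred version
        }

        @Override
        public int getVersion() {
            return version;
        }

        @Override
        public void setVersion(final int version) throws IllegalArgumentException {
            if (!isVersionSupported(version)) {
                throw new IllegalArgumentException("Version " + version + " is not supported");
            }
            this.version = version;
        }

        @Override
        public int getPreferredVersion() {
            return supported.get(0);
        }

        @Override
        public Integer getPreferredVersion(final int maxVersion) {
            for (final Integer candidate : supported) {
                if (candidate <= maxVersion) {
                    return candidate;
                }
            }
            return null;
        }

        @Override
        public boolean isVersionSupported(final int version) {
            return supported.contains(version);
        }
    }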

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/exception/TransmissionDisabledException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/exception/TransmissionDisabledException.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/exception/TransmissionDisabledException.java
index 05fd915..d18c807 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/exception/TransmissionDisabledException.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/exception/TransmissionDisabledException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.remote.exception;
 
 /**
- * Indicates that the user disabled transmission while communications were
- * taking place with a peer
+ * Indicates that the user disabled transmission while communications were taking place with a peer
  */
 public class TransmissionDisabledException extends RuntimeException {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/CompressionOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/CompressionOutputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/CompressionOutputStream.java
index 311c84c..525b5b1 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/CompressionOutputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/CompressionOutputStream.java
@@ -58,8 +58,7 @@ public class CompressionOutputStream extends OutputStream {
     }
 
     /**
-     * Compresses the currently buffered chunk of data and sends it to the
-     * output stream
+     * Compresses the currently buffered chunk of data and sends it to the output stream
      *
      * @throws IOException if issues occur writing to stream
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/BufferStateManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/BufferStateManager.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/BufferStateManager.java
index e613155..6e54d62 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/BufferStateManager.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/BufferStateManager.java
@@ -38,9 +38,7 @@ public class BufferStateManager {
     }
 
     /**
-     * Ensures that the buffer is at least as big as the size specified,
-     * resizing the buffer if necessary. This operation MAY change the direction
-     * of the buffer.
+     * Ensures that the buffer is at least as big as the size specified, resizing the buffer if necessary. This operation MAY change the direction of the buffer.
      *
      * @param requiredSize the desired size of the buffer
      */
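
The ensure-capacity behavior described in the Javadoc above is a common pattern when managing a java.nio.ByteBuffer. As a generic illustration of one way such an operation can be written (not the class's actual implementation, which is not included in this hunk, and leaving out the read/write direction bookkeeping the real manager also handles), the grow-and-copy step looks roughly like:

    import java.nio.ByteBuffer;

    public final class ByteBufferResizeSketch {

        private ByteBufferResizeSketch() {
        }

        /**
         * Returns a buffer whose capacity is at least requiredSize. If the existing
         * buffer is already large enough it is returned unchanged; otherwise a larger
         * buffer is allocated and the bytes written so far are copied into it.
         */
        public static ByteBuffer ensureSize(final ByteBuffer buffer, final int requiredSize) {
            if (buffer.capacity() >= requiredSize) {
                return buffer;
            }
            final ByteBuffer resized = ByteBuffer.allocate(requiredSize);
            buffer.flip();       // switch the old buffer to read mode
            resized.put(buffer); // copy the previously written bytes
            return resized;      // left in write mode, positioned after the copied data
        }
    }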

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelInputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelInputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelInputStream.java
index 19179bc..ca6de85 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelInputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelInputStream.java
@@ -47,8 +47,7 @@ public class SSLSocketChannelInputStream extends InputStream {
     }
 
     /**
-     * Closes the underlying SSLSocketChannel, which will also close the
-     * OutputStream and connection
+     * Closes the underlying SSLSocketChannel, which will also close the OutputStream and connection
      */
     @Override
     public void close() throws IOException {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelOutputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelOutputStream.java
index ce4e420..262cf54 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelOutputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/remote/io/socket/ssl/SSLSocketChannelOutputStream.java
@@ -43,8 +43,7 @@ public class SSLSocketChannelOutputStream extends OutputStream {
     }
 
     /**
-     * Closes the underlying SSLSocketChannel, which also will close the
-     * InputStream and the connection
+     * Closes the underlying SSLSocketChannel, which also will close the InputStream and the connection
      */
     @Override
     public void close() throws IOException {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedInputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedInputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedInputStream.java
index aaf37ea..2afaa70 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedInputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedInputStream.java
@@ -19,11 +19,8 @@ package org.apache.nifi.stream.io;
 import java.io.InputStream;
 
 /**
- * This class is a slight modification of the BufferedInputStream in the java.io
- * package. The modification is that this implementation does not provide
- * synchronization on method calls, which means that this class is not suitable
- * for use by multiple threads. However, the absence of these synchronized
- * blocks results in potentially much better performance.
+ * This class is a slight modification of the BufferedInputStream in the java.io package. The modification is that this implementation does not provide synchronization on method calls, which means
+ * that this class is not suitable for use by multiple threads. However, the absence of these synchronized blocks results in potentially much better performance.
  */
 public class BufferedInputStream extends java.io.BufferedInputStream {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedOutputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedOutputStream.java
index eadfcab..dc56927 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedOutputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/BufferedOutputStream.java
@@ -21,11 +21,8 @@ import java.io.IOException;
 import java.io.OutputStream;
 
 /**
- * This class is a slight modification of the
- * {@link java.io.BufferedOutputStream} class. This implementation differs in
- * that it does not mark methods as synchronized. This means that this class is
- * not suitable for writing by multiple concurrent threads. However, the removal
- * of the synchronized keyword results in potentially much better performance.
+ * This class is a slight modification of the {@link java.io.BufferedOutputStream} class. This implementation differs in that it does not mark methods as synchronized. This means that this class is
+ * not suitable for writing by multiple concurrent threads. However, the removal of the synchronized keyword results in potentially much better performance.
  */
 public class BufferedOutputStream extends FilterOutputStream {
 
@@ -35,15 +32,13 @@ public class BufferedOutputStream extends FilterOutputStream {
     protected byte buf[];
 
     /**
-     * The number of valid bytes in the buffer. This value is always in the
-     * range <tt>0</tt> through <tt>buf.length</tt>; elements
+     * The number of valid bytes in the buffer. This value is always in the range <tt>0</tt> through <tt>buf.length</tt>; elements
      * <tt>buf[0]</tt> through <tt>buf[count-1]</tt> contain valid byte data.
      */
     protected int count;
 
     /**
-     * Creates a new buffered output stream to write data to the specified
-     * underlying output stream.
+     * Creates a new buffered output stream to write data to the specified underlying output stream.
      *
      * @param out the underlying output stream.
      */
@@ -52,8 +47,7 @@ public class BufferedOutputStream extends FilterOutputStream {
     }
 
     /**
-     * Creates a new buffered output stream to write data to the specified
-     * underlying output stream with the specified buffer size.
+     * Creates a new buffered output stream to write data to the specified underlying output stream with the specified buffer size.
      *
      * @param out the underlying output stream.
      * @param size the buffer size.
@@ -92,16 +86,12 @@ public class BufferedOutputStream extends FilterOutputStream {
     }
 
     /**
-     * Writes <code>len</code> bytes from the specified byte array starting at
-     * offset <code>off</code> to this buffered output stream.
+     * Writes <code>len</code> bytes from the specified byte array starting at offset <code>off</code> to this buffered output stream.
      *
      * <p>
-     * Ordinarily this method stores bytes from the given array into this
-     * stream's buffer, flushing the buffer to the underlying output stream as
-     * needed. If the requested length is at least as large as this stream's
-     * buffer, however, then this method will flush the buffer and write the
-     * bytes directly to the underlying output stream. Thus redundant
-     * <code>BufferedOutputStream</code>s will not copy data unnecessarily.
+     * Ordinarily this method stores bytes from the given array into this stream's buffer, flushing the buffer to the underlying output stream as needed. If the requested length is at least as large
+     * as this stream's buffer, however, then this method will flush the buffer and write the bytes directly to the underlying output stream. Thus redundant <code>BufferedOutputStream</code>s will not
+     * copy data unnecessarily.
      *
      * @param b the data.
      * @param off the start offset in the data.
@@ -126,8 +116,7 @@ public class BufferedOutputStream extends FilterOutputStream {
     }
 
     /**
-     * Flushes this buffered output stream. This forces any buffered output
-     * bytes to be written out to the underlying output stream.
+     * Flushes this buffered output stream. This forces any buffered output bytes to be written out to the underlying output stream.
      *
      * @exception IOException if an I/O error occurs.
      * @see java.io.FilterOutputStream#out
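
A rough sketch of how the non-synchronized BufferedOutputStream described above might be used from a single thread (illustrative only; the file name and 8 KB buffer size are arbitrary choices, not taken from the repository):

import org.apache.nifi.stream.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class SingleThreadedBufferedWrite {
    public static void main(String[] args) throws IOException {
        // Safe only when a single thread writes, since the methods are not synchronized.
        try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream("out.bin"), 8192)) {
            for (int i = 0; i < 1000; i++) {
                out.write(i & 0xFF); // buffered; flushed to the file as the buffer fills
            }
            out.flush();
        }
    }
}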

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayInputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayInputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayInputStream.java
index 284cd54..85c8c4f 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayInputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayInputStream.java
@@ -19,55 +19,41 @@ package org.apache.nifi.stream.io;
 import java.io.InputStream;
 
 /**
- * This class performs the same function as java.io.ByteArrayInputStream but
- * does not mark its methods as synchronized
+ * This class performs the same function as java.io.ByteArrayInputStream but does not mark its methods as synchronized
  */
 public class ByteArrayInputStream extends InputStream {
 
     /**
-     * An array of bytes that was provided by the creator of the stream.
-     * Elements <code>buf[0]</code> through <code>buf[count-1]</code> are the
-     * only bytes that can ever be read from the stream; element
-     * <code>buf[pos]</code> is the next byte to be read.
+     * An array of bytes that was provided by the creator of the stream. Elements <code>buf[0]</code> through <code>buf[count-1]</code> are the only bytes that can ever be read from the stream;
+     * element <code>buf[pos]</code> is the next byte to be read.
      */
     protected byte buf[];
 
     /**
-     * The index of the next character to read from the input stream buffer.
-     * This value should always be nonnegative and not larger than the value of
-     * <code>count</code>. The next byte to be read from the input stream buffer
-     * will be <code>buf[pos]</code>.
+     * The index of the next character to read from the input stream buffer. This value should always be nonnegative and not larger than the value of <code>count</code>. The next byte to be read from
+     * the input stream buffer will be <code>buf[pos]</code>.
      */
     protected int pos;
 
     /**
-     * The currently marked position in the stream. ByteArrayInputStream objects
-     * are marked at position zero by default when constructed. They may be
-     * marked at another position within the buffer by the <code>mark()</code>
-     * method. The current buffer position is set to this point by the
-     * <code>reset()</code> method.
+     * The currently marked position in the stream. ByteArrayInputStream objects are marked at position zero by default when constructed. They may be marked at another position within the buffer by
+     * the <code>mark()</code> method. The current buffer position is set to this point by the <code>reset()</code> method.
      * <p>
-     * If no mark has been set, then the value of mark is the offset passed to
-     * the constructor (or 0 if the offset was not supplied).
+     * If no mark has been set, then the value of mark is the offset passed to the constructor (or 0 if the offset was not supplied).
      *
      * @since JDK1.1
      */
     protected int mark = 0;
 
     /**
-     * The index one greater than the last valid character in the input stream
-     * buffer. This value should always be nonnegative and not larger than the
-     * length of <code>buf</code>. It is one greater than the position of the
-     * last byte within <code>buf</code> that can ever be read from the input
-     * stream buffer.
+     * The index one greater than the last valid character in the input stream buffer. This value should always be nonnegative and not larger than the length of <code>buf</code>. It is one greater
+     * than the position of the last byte within <code>buf</code> that can ever be read from the input stream buffer.
      */
     protected int count;
 
     /**
-     * Creates a <code>ByteArrayInputStream</code> so that it uses
-     * <code>buf</code> as its buffer array. The buffer array is not copied. The
-     * initial value of <code>pos</code> is <code>0</code> and the initial value
-     * of  <code>count</code> is the length of <code>buf</code>.
+     * Creates a <code>ByteArrayInputStream</code> so that it uses <code>buf</code> as its buffer array. The buffer array is not copied. The initial value of <code>pos</code> is <code>0</code> and the
+     * initial value of <code>count</code> is the length of <code>buf</code>.
      *
      * @param buf the input buffer.
      */
@@ -78,12 +64,8 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Creates <code>ByteArrayInputStream</code> that uses <code>buf</code> as
-     * its buffer array. The initial value of <code>pos</code> is
-     * <code>offset</code> and the initial value of <code>count</code> is the
-     * minimum of <code>offset+length</code> and <code>buf.length</code>. The
-     * buffer array is not copied. The buffer's mark is set to the specified
-     * offset.
+     * Creates <code>ByteArrayInputStream</code> that uses <code>buf</code> as its buffer array. The initial value of <code>pos</code> is <code>offset</code> and the initial value of
+     * <code>count</code> is the minimum of <code>offset+length</code> and <code>buf.length</code>. The buffer array is not copied. The buffer's mark is set to the specified offset.
      *
      * @param buf the input buffer.
      * @param offset the offset in the buffer of the first byte to read.
@@ -97,15 +79,12 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Reads the next byte of data from this input stream. The value byte is
-     * returned as an <code>int</code> in the range <code>0</code> to
-     * <code>255</code>. If no byte is available because the end of the stream
-     * has been reached, the value <code>-1</code> is returned.
+     * Reads the next byte of data from this input stream. The value byte is returned as an <code>int</code> in the range <code>0</code> to <code>255</code>. If no byte is available because the end of
+     * the stream has been reached, the value <code>-1</code> is returned.
      * <p>
      * This <code>read</code> method cannot block.
      *
-     * @return the next byte of data, or <code>-1</code> if the end of the
-     * stream has been reached.
+     * @return the next byte of data, or <code>-1</code> if the end of the stream has been reached.
      */
     @Override
     public int read() {
@@ -113,29 +92,19 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Reads up to <code>len</code> bytes of data into an array of bytes from
-     * this input stream. If <code>pos</code> equals <code>count</code>, then
-     * <code>-1</code> is returned to indicate end of file. Otherwise, the
-     * number <code>k</code> of bytes read is equal to the smaller of
-     * <code>len</code> and <code>count-pos</code>. If <code>k</code> is
-     * positive, then bytes <code>buf[pos]</code> through
-     * <code>buf[pos+k-1]</code> are copied into <code>b[off]</code> through
-     * <code>b[off+k-1]</code> in the manner performed by
-     * <code>System.arraycopy</code>. The value <code>k</code> is added into
-     * <code>pos</code> and <code>k</code> is returned.
+     * Reads up to <code>len</code> bytes of data into an array of bytes from this input stream. If <code>pos</code> equals <code>count</code>, then <code>-1</code> is returned to indicate end of
+     * file. Otherwise, the number <code>k</code> of bytes read is equal to the smaller of <code>len</code> and <code>count-pos</code>. If <code>k</code> is positive, then bytes <code>buf[pos]</code>
+     * through <code>buf[pos+k-1]</code> are copied into <code>b[off]</code> through <code>b[off+k-1]</code> in the manner performed by <code>System.arraycopy</code>. The value <code>k</code> is added
+     * into <code>pos</code> and <code>k</code> is returned.
      * <p>
      * This <code>read</code> method cannot block.
      *
      * @param b the buffer into which the data is read.
      * @param off the start offset in the destination array <code>b</code>
      * @param len the maximum number of bytes read.
-     * @return the total number of bytes read into the buffer, or
-     * <code>-1</code> if there is no more data because the end of the stream
-     * has been reached.
+     * @return the total number of bytes read into the buffer, or <code>-1</code> if there is no more data because the end of the stream has been reached.
      * @exception NullPointerException If <code>b</code> is <code>null</code>.
-     * @exception IndexOutOfBoundsException If <code>off</code> is negative,
-     * <code>len</code> is negative, or <code>len</code> is greater than
-     * <code>b.length - off</code>
+     * @exception IndexOutOfBoundsException If <code>off</code> is negative, <code>len</code> is negative, or <code>len</code> is greater than <code>b.length - off</code>
      */
     @Override
     public int read(byte b[], int off, int len) {
@@ -162,11 +131,8 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Skips <code>n</code> bytes of input from this input stream. Fewer bytes
-     * might be skipped if the end of the input stream is reached. The actual
-     * number <code>k</code> of bytes to be skipped is equal to the smaller of
-     * <code>n</code> and  <code>count-pos</code>. The value <code>k</code> is
-     * added into <code>pos</code> and <code>k</code> is returned.
+     * Skips <code>n</code> bytes of input from this input stream. Fewer bytes might be skipped if the end of the input stream is reached. The actual number <code>k</code> of bytes to be skipped is
+     * equal to the smaller of <code>n</code> and  <code>count-pos</code>. The value <code>k</code> is added into <code>pos</code> and <code>k</code> is returned.
      *
      * @param n the number of bytes to be skipped.
      * @return the actual number of bytes skipped.
@@ -183,14 +149,11 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Returns the number of remaining bytes that can be read (or skipped over)
-     * from this input stream.
+     * Returns the number of remaining bytes that can be read (or skipped over) from this input stream.
      * <p>
-     * The value returned is <code>count&nbsp;- pos</code>, which is the number
-     * of bytes remaining to be read from the input buffer.
+     * The value returned is <code>count&nbsp;- pos</code>, which is the number of bytes remaining to be read from the input buffer.
      *
-     * @return the number of remaining bytes that can be read (or skipped over)
-     * from this input stream without blocking.
+     * @return the number of remaining bytes that can be read (or skipped over) from this input stream without blocking.
      */
     @Override
     public int available() {
@@ -198,9 +161,7 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Tests if this <code>InputStream</code> supports mark/reset. The
-     * <code>markSupported</code> method of <code>ByteArrayInputStream</code>
-     * always returns <code>true</code>.
+     * Tests if this <code>InputStream</code> supports mark/reset. The <code>markSupported</code> method of <code>ByteArrayInputStream</code> always returns <code>true</code>.
      *
      * @since JDK1.1
      */
@@ -210,12 +171,10 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Set the current marked position in the stream. ByteArrayInputStream
-     * objects are marked at position zero by default when constructed. They may
-     * be marked at another position within the buffer by this method.
+     * Set the current marked position in the stream. ByteArrayInputStream objects are marked at position zero by default when constructed. They may be marked at another position within the buffer by
+     * this method.
      * <p>
-     * If no mark has been set, then the value of the mark is the offset passed
-     * to the constructor (or 0 if the offset was not supplied).
+     * If no mark has been set, then the value of the mark is the offset passed to the constructor (or 0 if the offset was not supplied).
      *
      * <p>
      * Note: The <code>readAheadLimit</code> for this class has no meaning.
@@ -228,9 +187,7 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Resets the buffer to the marked position. The marked position is 0 unless
-     * another position was marked or an offset was specified in the
-     * constructor.
+     * Resets the buffer to the marked position. The marked position is 0 unless another position was marked or an offset was specified in the constructor.
      */
     @Override
     public void reset() {
@@ -238,9 +195,7 @@ public class ByteArrayInputStream extends InputStream {
     }
 
     /**
-     * Closing a <tt>ByteArrayInputStream</tt> has no effect. The methods in
-     * this class can be called after the stream has been closed without
-     * generating an <tt>IOException</tt>.
+     * Closing a <tt>ByteArrayInputStream</tt> has no effect. The methods in this class can be called after the stream has been closed without generating an <tt>IOException</tt>.
      * <p>
      */
     @Override
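
For reference, a small usage sketch of the mark/reset behavior documented above for the unsynchronized ByteArrayInputStream (illustrative values; not taken from the repository):

import org.apache.nifi.stream.io.ByteArrayInputStream;

public class MarkResetExample {
    public static void main(String[] args) {
        final byte[] data = "abcdef".getBytes();
        // Offset 1, length 4: only 'b' through 'e' are readable; the mark starts at the offset.
        final ByteArrayInputStream in = new ByteArrayInputStream(data, 1, 4);

        System.out.println((char) in.read()); // b
        in.mark(0);                           // readAheadLimit has no meaning for this class
        System.out.println((char) in.read()); // c
        in.reset();                           // back to the marked position
        System.out.println((char) in.read()); // c again
        System.out.println(in.available());   // 2 bytes ('d' and 'e') remain
    }
}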

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayOutputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayOutputStream.java
index bbc5cee..aade199 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayOutputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteArrayOutputStream.java
@@ -22,14 +22,11 @@ import java.io.UnsupportedEncodingException;
 import java.util.Arrays;
 
 /**
- * This class provides a more efficient implementation of the
- * java.io.ByteArrayOutputStream. The efficiency is gained in two ways:
+ * This class provides a more efficient implementation of the java.io.ByteArrayOutputStream. The efficiency is gained in two ways:
  * <ul>
  * <li>The write methods are not synchronized</li>
- * <li>The class provides {@link #getUnderlyingBuffer()} and
- * {@link #getBufferLength()}, which can be used to access the underlying byte
- * array directly, rather than the System.arraycopy that {@link #toByteArray()}
- * uses
+ * <li>The class provides {@link #getUnderlyingBuffer()} and {@link #getBufferLength()}, which can be used to access the underlying byte array directly, rather than the System.arraycopy that
+ * {@link #toByteArray()} uses
  * </ul>
  *
  */
@@ -46,16 +43,14 @@ public class ByteArrayOutputStream extends OutputStream {
     protected int count;
 
     /**
-     * Creates a new byte array output stream. The buffer capacity is initially
-     * 32 bytes, though its size increases if necessary.
+     * Creates a new byte array output stream. The buffer capacity is initially 32 bytes, though its size increases if necessary.
      */
     public ByteArrayOutputStream() {
         this(32);
     }
 
     /**
-     * Creates a new byte array output stream, with a buffer capacity of the
-     * specified size, in bytes.
+     * Creates a new byte array output stream, with a buffer capacity of the specified size, in bytes.
      *
      * @param size the initial size.
      * @exception IllegalArgumentException if size is negative.
@@ -69,13 +64,10 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Increases the capacity if necessary to ensure that it can hold at least
-     * the number of elements specified by the minimum capacity argument.
+     * Increases the capacity if necessary to ensure that it can hold at least the number of elements specified by the minimum capacity argument.
      *
      * @param minCapacity the desired minimum capacity
-     * @throws OutOfMemoryError if {@code minCapacity < 0}. This is interpreted
-     * as a request for the unsatisfiably large capacity
-     * {@code (long) Integer.MAX_VALUE + (minCapacity - Integer.MAX_VALUE)}.
+     * @throws OutOfMemoryError if {@code minCapacity < 0}. This is interpreted as a request for the unsatisfiably large capacity {@code (long) Integer.MAX_VALUE + (minCapacity - Integer.MAX_VALUE)}.
      */
     private void ensureCapacity(int minCapacity) {
         // overflow-conscious code
@@ -85,8 +77,7 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Increases the capacity to ensure that it can hold at least the number of
-     * elements specified by the minimum capacity argument.
+     * Increases the capacity to ensure that it can hold at least the number of elements specified by the minimum capacity argument.
      *
      * @param minCapacity the desired minimum capacity
      */
@@ -119,8 +110,7 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Writes <code>len</code> bytes from the specified byte array starting at
-     * offset <code>off</code> to this byte array output stream.
+     * Writes <code>len</code> bytes from the specified byte array starting at offset <code>off</code> to this byte array output stream.
      *
      * @param b the data.
      * @param off the start offset in the data.
@@ -138,9 +128,8 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Writes the complete contents of this byte array output stream to the
-     * specified output stream argument, as if by calling the output stream's
-     * write method using <code>out.write(buf, 0, count)</code>.
+     * Writes the complete contents of this byte array output stream to the specified output stream argument, as if by calling the output stream's write method using
+     * <code>out.write(buf, 0, count)</code>.
      *
      * @param out the output stream to which to write the data.
      * @exception IOException if an I/O error occurs.
@@ -150,10 +139,8 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Resets the <code>count</code> field of this byte array output stream to
-     * zero, so that all currently accumulated output in the output stream is
-     * discarded. The output stream can be used again, reusing the already
-     * allocated buffer space.
+     * Resets the <code>count</code> field of this byte array output stream to zero, so that all currently accumulated output in the output stream is discarded. The output stream can be used again,
+     * reusing the already allocated buffer space.
      *
      * @see java.io.ByteArrayInputStream#count
      */
@@ -162,9 +149,7 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Creates a newly allocated byte array. Its size is the current size of
-     * this output stream and the valid contents of the buffer have been copied
-     * into it.
+     * Creates a newly allocated byte array. Its size is the current size of this output stream and the valid contents of the buffer have been copied into it.
      *
      * @return the current contents of this output stream, as a byte array.
      * @see java.io.ByteArrayOutputStream#size()
@@ -176,8 +161,7 @@ public class ByteArrayOutputStream extends OutputStream {
     /**
      * Returns the current size of the buffer.
      *
-     * @return the value of the <code>count</code> field, which is the number of
-     * valid bytes in this output stream.
+     * @return the value of the <code>count</code> field, which is the number of valid bytes in this output stream.
      * @see java.io.ByteArrayOutputStream#count
      */
     public int size() {
@@ -185,16 +169,12 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Converts the buffer's contents into a string decoding bytes using the
-     * platform's default character set. The length of the new <tt>String</tt>
-     * is a function of the character set, and hence may not be equal to the
-     * size of the buffer.
+     * Converts the buffer's contents into a string decoding bytes using the platform's default character set. The length of the new <tt>String</tt>
+     * is a function of the character set, and hence may not be equal to the size of the buffer.
      *
      * <p>
-     * This method always replaces malformed-input and unmappable-character
-     * sequences with the default replacement string for the platform's default
-     * character set. The {@linkplain java.nio.charset.CharsetDecoder} class
-     * should be used when more control over the decoding process is required.
+     * This method always replaces malformed-input and unmappable-character sequences with the default replacement string for the platform's default character set. The
+     * {@linkplain java.nio.charset.CharsetDecoder} class should be used when more control over the decoding process is required.
      *
      * @return String decoded from the buffer's contents.
      * @since JDK1.1
@@ -205,22 +185,16 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Converts the buffer's contents into a string by decoding the bytes using
-     * the specified {@link java.nio.charset.Charset charsetName}. The length of
-     * the new <tt>String</tt> is a function of the charset, and hence may not
-     * be equal to the length of the byte array.
+     * Converts the buffer's contents into a string by decoding the bytes using the specified {@link java.nio.charset.Charset charsetName}. The length of the new <tt>String</tt> is a function of the
+     * charset, and hence may not be equal to the length of the byte array.
      *
      * <p>
-     * This method always replaces malformed-input and unmappable-character
-     * sequences with this charset's default replacement string. The {@link
-     * java.nio.charset.CharsetDecoder} class should be used when more control
-     * over the decoding process is required.
+     * This method always replaces malformed-input and unmappable-character sequences with this charset's default replacement string. The {@link
+     * java.nio.charset.CharsetDecoder} class should be used when more control over the decoding process is required.
      *
-     * @param charsetName the name of a supported
-     *              {@linkplain java.nio.charset.Charset <code>charset</code>}
+     * @param charsetName the name of a supported {@linkplain java.nio.charset.Charset <code>charset</code>}
      * @return String decoded from the buffer's contents.
-     * @exception UnsupportedEncodingException If the named charset is not
-     * supported
+     * @exception UnsupportedEncodingException If the named charset is not supported
      * @since JDK1.1
      */
     public String toString(String charsetName) throws UnsupportedEncodingException {
@@ -228,9 +202,7 @@ public class ByteArrayOutputStream extends OutputStream {
     }
 
     /**
-     * Closing a <tt>ByteArrayOutputStream</tt> has no effect. The methods in
-     * this class can be called after the stream has been closed without
-     * generating an <tt>IOException</tt>.
+     * Closing a <tt>ByteArrayOutputStream</tt> has no effect. The methods in this class can be called after the stream has been closed without generating an <tt>IOException</tt>.
      * <p>
      *
      */
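
A minimal sketch of the copy-avoiding accessors named in the class Javadoc above, getUnderlyingBuffer() and getBufferLength(); single-threaded use is assumed since the write methods are not synchronized:

import org.apache.nifi.stream.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class UnderlyingBufferExample {
    public static void main(String[] args) throws IOException {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream(64);
        baos.write("hello".getBytes(StandardCharsets.UTF_8));

        // toByteArray() allocates and copies ...
        final byte[] copy = baos.toByteArray();

        // ... while the accessors expose the internal buffer directly; only the first
        // getBufferLength() bytes of the returned array hold valid data.
        final byte[] raw = baos.getUnderlyingBuffer();
        final int validBytes = baos.getBufferLength();

        System.out.println(copy.length + " copied bytes, " + validBytes + " valid bytes in place, capacity " + raw.length);
    }
}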

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteCountingOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteCountingOutputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteCountingOutputStream.java
index e71937e..9bbd45e 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteCountingOutputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ByteCountingOutputStream.java
@@ -27,13 +27,12 @@ public class ByteCountingOutputStream extends OutputStream {
     public ByteCountingOutputStream(final OutputStream out) {
         this.out = out;
     }
-    
+
     public ByteCountingOutputStream(final OutputStream out, final long initialByteCount) {
         this.out = out;
         this.bytesWritten = initialByteCount;
     }
 
-
     @Override
     public void write(int b) throws IOException {
         out.write(b);
@@ -45,8 +44,6 @@ public class ByteCountingOutputStream extends OutputStream {
         write(b, 0, b.length);
     }
 
-    ;
-    
     @Override
     public void write(byte[] b, int off, int len) throws IOException {
         out.write(b, off, len);
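
A brief sketch of how ByteCountingOutputStream might be used to track bytes written; the getBytesWritten() accessor is an assumption here (only the bytesWritten field and the two constructors appear in the diff above):

import org.apache.nifi.stream.io.ByteArrayOutputStream;
import org.apache.nifi.stream.io.ByteCountingOutputStream;
import java.io.IOException;

public class ByteCountingExample {
    public static void main(String[] args) throws IOException {
        // The second constructor argument resumes the count at 100 previously written bytes.
        final ByteCountingOutputStream counting =
                new ByteCountingOutputStream(new ByteArrayOutputStream(), 100L);
        counting.write(new byte[25]);
        // getBytesWritten() is assumed to expose the running count.
        System.out.println(counting.getBytesWritten()); // expected: 125
    }
}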

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/DataOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/DataOutputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/DataOutputStream.java
index 1dd90f5..e205996 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/DataOutputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/DataOutputStream.java
@@ -23,14 +23,12 @@ import java.io.OutputStream;
 import java.io.UTFDataFormatException;
 
 /**
- * This class is different from java.io.DataOutputStream in that it does
- * synchronize on its methods.
+ * This class is different from java.io.DataOutputStream in that it does not synchronize on its methods.
  */
 public class DataOutputStream extends FilterOutputStream implements DataOutput {
 
     /**
-     * The number of bytes written to the data output stream so far. If this
-     * counter overflows, it will be wrapped to Integer.MAX_VALUE.
+     * The number of bytes written to the data output stream so far. If this counter overflows, it will be wrapped to Integer.MAX_VALUE.
      */
     protected int written;
 
@@ -40,9 +38,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     private byte[] bytearr = null;
 
     /**
-     * Creates a new data output stream to write data to the specified
-     * underlying output stream. The counter <code>written</code> is set to
-     * zero.
+     * Creates a new data output stream to write data to the specified underlying output stream. The counter <code>written</code> is set to zero.
      *
      * @param out the underlying output stream, to be saved for later use.
      * @see java.io.FilterOutputStream#out
@@ -52,8 +48,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Increases the written counter by the specified value until it reaches
-     * Integer.MAX_VALUE.
+     * Increases the written counter by the specified value until it reaches Integer.MAX_VALUE.
      */
     private void incCount(int value) {
         int temp = written + value;
@@ -64,9 +59,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes the specified byte (the low eight bits of the argument
-     * <code>b</code>) to the underlying output stream. If no exception is
-     * thrown, the counter <code>written</code> is incremented by
+     * Writes the specified byte (the low eight bits of the argument <code>b</code>) to the underlying output stream. If no exception is thrown, the counter <code>written</code> is incremented by
      * <code>1</code>.
      * <p>
      * Implements the <code>write</code> method of <code>OutputStream</code>.
@@ -82,10 +75,8 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes <code>len</code> bytes from the specified byte array starting at
-     * offset <code>off</code> to the underlying output stream. If no exception
-     * is thrown, the counter <code>written</code> is incremented by
-     * <code>len</code>.
+     * Writes <code>len</code> bytes from the specified byte array starting at offset <code>off</code> to the underlying output stream. If no exception is thrown, the counter <code>written</code> is
+     * incremented by <code>len</code>.
      *
      * @param b the data.
      * @param off the start offset in the data.
@@ -100,11 +91,9 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Flushes this data output stream. This forces any buffered output bytes to
-     * be written out to the stream.
+     * Flushes this data output stream. This forces any buffered output bytes to be written out to the stream.
      * <p>
-     * The <code>flush</code> method of <code>DataOutputStream</code> calls the
-     * <code>flush</code> method of its underlying output stream.
+     * The <code>flush</code> method of <code>DataOutputStream</code> calls the <code>flush</code> method of its underlying output stream.
      *
      * @exception IOException if an I/O error occurs.
      * @see java.io.FilterOutputStream#out
@@ -116,11 +105,8 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes a <code>boolean</code> to the underlying output stream as a 1-byte
-     * value. The value <code>true</code> is written out as the value
-     * <code>(byte)1</code>; the value <code>false</code> is written out as the
-     * value <code>(byte)0</code>. If no exception is thrown, the counter
-     * <code>written</code> is incremented by <code>1</code>.
+     * Writes a <code>boolean</code> to the underlying output stream as a 1-byte value. The value <code>true</code> is written out as the value <code>(byte)1</code>; the value <code>false</code> is
+     * written out as the value <code>(byte)0</code>. If no exception is thrown, the counter <code>written</code> is incremented by <code>1</code>.
      *
      * @param v a <code>boolean</code> value to be written.
      * @exception IOException if an I/O error occurs.
@@ -133,9 +119,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes out a <code>byte</code> to the underlying output stream as a
-     * 1-byte value. If no exception is thrown, the counter <code>written</code>
-     * is incremented by <code>1</code>.
+     * Writes out a <code>byte</code> to the underlying output stream as a 1-byte value. If no exception is thrown, the counter <code>written</code> is incremented by <code>1</code>.
      *
      * @param v a <code>byte</code> value to be written.
      * @exception IOException if an I/O error occurs.
@@ -148,9 +132,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes a <code>short</code> to the underlying output stream as two bytes,
-     * high byte first. If no exception is thrown, the counter
-     * <code>written</code> is incremented by <code>2</code>.
+     * Writes a <code>short</code> to the underlying output stream as two bytes, high byte first. If no exception is thrown, the counter <code>written</code> is incremented by <code>2</code>.
      *
      * @param v a <code>short</code> to be written.
      * @exception IOException if an I/O error occurs.
@@ -164,9 +146,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes a <code>char</code> to the underlying output stream as a 2-byte
-     * value, high byte first. If no exception is thrown, the counter
-     * <code>written</code> is incremented by <code>2</code>.
+     * Writes a <code>char</code> to the underlying output stream as a 2-byte value, high byte first. If no exception is thrown, the counter <code>written</code> is incremented by <code>2</code>.
      *
      * @param v a <code>char</code> value to be written.
      * @exception IOException if an I/O error occurs.
@@ -180,9 +160,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes an <code>int</code> to the underlying output stream as four bytes,
-     * high byte first. If no exception is thrown, the counter
-     * <code>written</code> is incremented by <code>4</code>.
+     * Writes an <code>int</code> to the underlying output stream as four bytes, high byte first. If no exception is thrown, the counter <code>written</code> is incremented by <code>4</code>.
      *
      * @param v an <code>int</code> to be written.
      * @exception IOException if an I/O error occurs.
@@ -200,9 +178,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     private final byte writeBuffer[] = new byte[8];
 
     /**
-     * Writes a <code>long</code> to the underlying output stream as eight
-     * bytes, high byte first. In no exception is thrown, the counter
-     * <code>written</code> is incremented by <code>8</code>.
+     * Writes a <code>long</code> to the underlying output stream as eight bytes, high byte first. If no exception is thrown, the counter <code>written</code> is incremented by <code>8</code>.
      *
      * @param v a <code>long</code> to be written.
      * @exception IOException if an I/O error occurs.
@@ -223,11 +199,8 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Converts the float argument to an <code>int</code> using the
-     * <code>floatToIntBits</code> method in class <code>Float</code>, and then
-     * writes that <code>int</code> value to the underlying output stream as a
-     * 4-byte quantity, high byte first. If no exception is thrown, the counter
-     * <code>written</code> is incremented by <code>4</code>.
+     * Converts the float argument to an <code>int</code> using the <code>floatToIntBits</code> method in class <code>Float</code>, and then writes that <code>int</code> value to the underlying output
+     * stream as a 4-byte quantity, high byte first. If no exception is thrown, the counter <code>written</code> is incremented by <code>4</code>.
      *
      * @param v a <code>float</code> value to be written.
      * @exception IOException if an I/O error occurs.
@@ -240,11 +213,8 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Converts the double argument to a <code>long</code> using the
-     * <code>doubleToLongBits</code> method in class <code>Double</code>, and
-     * then writes that <code>long</code> value to the underlying output stream
-     * as an 8-byte quantity, high byte first. If no exception is thrown, the
-     * counter <code>written</code> is incremented by <code>8</code>.
+     * Converts the double argument to a <code>long</code> using the <code>doubleToLongBits</code> method in class <code>Double</code>, and then writes that <code>long</code> value to the underlying
+     * output stream as an 8-byte quantity, high byte first. If no exception is thrown, the counter <code>written</code> is incremented by <code>8</code>.
      *
      * @param v a <code>double</code> value to be written.
      * @exception IOException if an I/O error occurs.
@@ -257,10 +227,8 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes out the string to the underlying output stream as a sequence of
-     * bytes. Each character in the string is written out, in sequence, by
-     * discarding its high eight bits. If no exception is thrown, the counter
-     * <code>written</code> is incremented by the length of <code>s</code>.
+     * Writes out the string to the underlying output stream as a sequence of bytes. Each character in the string is written out, in sequence, by discarding its high eight bits. If no exception is
+     * thrown, the counter <code>written</code> is incremented by the length of <code>s</code>.
      *
      * @param s a string of bytes to be written.
      * @exception IOException if an I/O error occurs.
@@ -276,11 +244,8 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Writes a string to the underlying output stream as a sequence of
-     * characters. Each character is written to the data output stream as if by
-     * the <code>writeChar</code> method. If no exception is thrown, the counter
-     * <code>written</code> is incremented by twice the length of
-     * <code>s</code>.
+     * Writes a string to the underlying output stream as a sequence of characters. Each character is written to the data output stream as if by the <code>writeChar</code> method. If no exception is
+     * thrown, the counter <code>written</code> is incremented by twice the length of <code>s</code>.
      *
      * @param s a <code>String</code> value to be written.
      * @exception IOException if an I/O error occurs.
@@ -303,15 +268,10 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
      * <a href="DataInput.html#modified-utf-8">modified UTF-8</a>
      * encoding in a machine-independent manner.
      * <p>
-     * First, two bytes are written to the output stream as if by the
-     * <code>writeShort</code> method giving the number of bytes to follow. This
-     * value is the number of bytes actually written out, not the length of the
-     * string. Following the length, each character of the string is output, in
-     * sequence, using the modified UTF-8 encoding for the character. If no
-     * exception is thrown, the counter <code>written</code> is incremented by
-     * the total number of bytes written to the output stream. This will be at
-     * least two plus the length of <code>str</code>, and at most two plus
-     * thrice the length of <code>str</code>.
+     * First, two bytes are written to the output stream as if by the <code>writeShort</code> method giving the number of bytes to follow. This value is the number of bytes actually written out, not
+     * the length of the string. Following the length, each character of the string is output, in sequence, using the modified UTF-8 encoding for the character. If no exception is thrown, the counter
+     * <code>written</code> is incremented by the total number of bytes written to the output stream. This will be at least two plus the length of <code>str</code>, and at most two plus thrice the
+     * length of <code>str</code>.
      *
      * @param str a string to be written.
      * @exception IOException if an I/O error occurs.
@@ -326,15 +286,10 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
      * <a href="DataInput.html#modified-utf-8">modified UTF-8</a>
      * encoding in a machine-independent manner.
      * <p>
-     * First, two bytes are written to out as if by the <code>writeShort</code>
-     * method giving the number of bytes to follow. This value is the number of
-     * bytes actually written out, not the length of the string. Following the
-     * length, each character of the string is output, in sequence, using the
-     * modified UTF-8 encoding for the character. If no exception is thrown, the
-     * counter <code>written</code> is incremented by the total number of bytes
-     * written to the output stream. This will be at least two plus the length
-     * of <code>str</code>, and at most two plus thrice the length of
-     * <code>str</code>.
+     * First, two bytes are written to out as if by the <code>writeShort</code> method giving the number of bytes to follow. This value is the number of bytes actually written out, not the length of
+     * the string. Following the length, each character of the string is output, in sequence, using the modified UTF-8 encoding for the character. If no exception is thrown, the counter
+     * <code>written</code> is incremented by the total number of bytes written to the output stream. This will be at least two plus the length of <code>str</code>, and at most two plus thrice the
+     * length of <code>str</code>.
      *
      * @param str a string to be written.
      * @param out destination to write to
@@ -404,9 +359,7 @@ public class DataOutputStream extends FilterOutputStream implements DataOutput {
     }
 
     /**
-     * Returns the current value of the counter <code>written</code>, the number
-     * of bytes written to this data output stream so far. If the counter
-     * overflows, it will be wrapped to Integer.MAX_VALUE.
+     * Returns the current value of the counter <code>written</code>, the number of bytes written to this data output stream so far. If the counter overflows, it will be wrapped to Integer.MAX_VALUE.
      *
      * @return the value of the <code>written</code> field.
      * @see java.io.DataOutputStream#written
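
As an illustration of the unsynchronized DataOutputStream documented above (a sketch; the values are arbitrary and not taken from the repository):

import org.apache.nifi.stream.io.ByteArrayOutputStream;
import org.apache.nifi.stream.io.DataOutputStream;
import java.io.IOException;

public class PrimitiveWriteExample {
    public static void main(String[] args) throws IOException {
        final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        final DataOutputStream dos = new DataOutputStream(bytes);

        dos.writeInt(42);         // four bytes, high byte first
        dos.writeUTF("flowfile"); // two-byte length prefix plus modified UTF-8 data
        dos.flush();

        System.out.println(bytes.toByteArray().length + " bytes written"); // 4 + 2 + 8 = 14
    }
}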

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/GZIPOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/GZIPOutputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/GZIPOutputStream.java
index 2864bbb..1e2f3c7 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/GZIPOutputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/GZIPOutputStream.java
@@ -21,9 +21,7 @@ import java.io.OutputStream;
 
 /**
  * <p>
- * This class extends the {@link java.util.zip.GZIPOutputStream} by allowing the
- * constructor to provide a compression level, and uses a default value of 1,
- * rather than 5.
+ * This class extends the {@link java.util.zip.GZIPOutputStream} by allowing the constructor to provide a compression level, and uses a default value of 1, rather than 5.
  * </p>
  */
 public class GZIPOutputStream extends java.util.zip.GZIPOutputStream {
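
A hedged sketch of setting the compression level; the two-argument constructor (output stream plus level) is an assumption based on the Javadoc above, and the file name is arbitrary:

import org.apache.nifi.stream.io.GZIPOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.Deflater;

public class GzipLevelExample {
    public static void main(String[] args) throws IOException {
        // Assumed constructor: (OutputStream out, int compressionLevel).
        try (GZIPOutputStream gzip = new GZIPOutputStream(new FileOutputStream("data.gz"), Deflater.BEST_COMPRESSION)) {
            gzip.write("compress me".getBytes());
        }
    }
}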

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java
index ac6322d..5153db5 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java
@@ -190,8 +190,7 @@ public class LeakyBucketStreamThrottler implements StreamThrottler {
     }
 
     /**
-     * This class is responsible for draining water from the leaky bucket. I.e.,
-     * it actually moves the data
+     * This class is responsible for draining water from the leaky bucket. I.e., it actually moves the data
      */
     private class Drain implements Runnable {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/MinimumLengthInputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/MinimumLengthInputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/MinimumLengthInputStream.java
index 7d6d8d1..44e9c2e 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/MinimumLengthInputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/MinimumLengthInputStream.java
@@ -22,8 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 
 /**
- * An InputStream that will throw EOFException if the underlying InputStream
- * runs out of data before reaching the configured minimum amount of data
+ * An InputStream that will throw EOFException if the underlying InputStream runs out of data before reaching the configured minimum amount of data
  */
 public class MinimumLengthInputStream extends FilterInputStream {
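
A sketch of the EOFException behavior described above; the (InputStream, long) constructor is an assumption, since only the class Javadoc appears in this diff:

import org.apache.nifi.stream.io.MinimumLengthInputStream;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

public class MinimumLengthExample {
    public static void main(String[] args) throws IOException {
        // Assumed constructor: wrapped stream plus the minimum number of bytes required.
        final InputStream in =
                new MinimumLengthInputStream(new ByteArrayInputStream(new byte[5]), 10);
        try {
            while (in.read() != -1) {
                // drain the stream
            }
        } catch (final EOFException e) {
            System.out.println("Stream ended before the configured minimum of 10 bytes");
        }
    }
}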
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/NonCloseableInputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/NonCloseableInputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/NonCloseableInputStream.java
index 0e75a22..27a0c47 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/NonCloseableInputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/NonCloseableInputStream.java
@@ -21,9 +21,8 @@ import java.io.IOException;
 import java.io.InputStream;
 
 /**
- * Wraps and InputStream so that the underlying InputStream cannot be closed.
- * This is used so that the InputStream can be wrapped with yet another
- * InputStream and prevent the outer layer from closing the inner InputStream
+ * Wraps an InputStream so that the underlying InputStream cannot be closed. This is used so that the InputStream can be wrapped with yet another InputStream and prevent the outer layer from closing
+ * the inner InputStream
  */
 public class NonCloseableInputStream extends FilterInputStream {
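
A small sketch of the close-suppressing wrapper described above; the single-argument constructor is an assumption based on the class being a FilterInputStream:

import org.apache.nifi.stream.io.NonCloseableInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class NonCloseableExample {
    public static void main(String[] args) throws IOException {
        final InputStream underlying = new ByteArrayInputStream("abc".getBytes());
        final InputStream wrapper = new NonCloseableInputStream(underlying); // assumed one-arg constructor

        wrapper.close();                               // does not close the wrapped stream
        System.out.println((char) underlying.read());  // still readable: prints 'a'
    }
}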
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/StreamUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/StreamUtils.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/StreamUtils.java
index 101a546..64f6eaa 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/StreamUtils.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/StreamUtils.java
@@ -40,9 +40,7 @@ public class StreamUtils {
     }
 
     /**
-     * Copies <code>numBytes</code> from <code>source</code> to
-     * <code>destination</code>. If <code>numBytes</code> are not available from
-     * <code>source</code>, throws EOFException
+     * Copies <code>numBytes</code> from <code>source</code> to <code>destination</code>. If <code>numBytes</code> are not available from <code>source</code>, throws EOFException
      *
      * @param source the source of bytes to copy
      * @param destination the destination to copy bytes to
@@ -64,9 +62,7 @@ public class StreamUtils {
     }
 
     /**
-     * Reads data from the given input stream, copying it to the destination
-     * byte array. If the InputStream has less data than the given byte array,
-     * throws an EOFException
+     * Reads data from the given input stream, copying it to the destination byte array. If the InputStream has less data than the given byte array, throws an EOFException
      *
      * @param source the source to copy bytes from
      * @param destination the destination to fill
@@ -77,15 +73,12 @@ public class StreamUtils {
     }
 
     /**
-     * Reads data from the given input stream, copying it to the destination
-     * byte array. If the InputStream has less data than the given byte array,
-     * throws an EOFException if <code>ensureCapacity</code> is true and
-     * otherwise returns the number of bytes copied
+     * Reads data from the given input stream, copying it to the destination byte array. If the InputStream has less data than the given byte array, throws an EOFException if
+     * <code>ensureCapacity</code> is true and otherwise returns the number of bytes copied
      *
      * @param source the source to read bytes from
      * @param destination the destination to fill
-     * @param ensureCapacity whether or not to enforce that the InputStream have
-     * at least as much data as the capacity of the destination byte array
+     * @param ensureCapacity whether or not to enforce that the InputStream have at least as much data as the capacity of the destination byte array
      * @return the number of bytes actually filled
      * @throws IOException if unable to read from the underlying stream
      */
@@ -109,10 +102,8 @@ public class StreamUtils {
     }
 
     /**
-     * Copies data from in to out until either we are out of data (returns null)
-     * or we hit one of the byte patterns identified by the
-     * <code>stoppers</code> parameter (returns the byte pattern matched). The
-     * bytes in the stopper will be copied.
+     * Copies data from in to out until either we are out of data (returns null) or we hit one of the byte patterns identified by the <code>stoppers</code> parameter (returns the byte pattern
+     * matched). The bytes in the stopper will be copied.
      *
      * @param in the source to read bytes from
      * @param out the destination to write bytes to
@@ -151,11 +142,8 @@ public class StreamUtils {
     }
 
     /**
-     * Copies data from in to out until either we are out of data (returns null)
-     * or we hit one of the byte patterns identified by the
-     * <code>stoppers</code> parameter (returns the byte pattern matched). The
-     * byte pattern matched will NOT be copied to the output and will be un-read
-     * from the input.
+     * Copies data from in to out until either we are out of data (returns null) or we hit one of the byte patterns identified by the <code>stoppers</code> parameter (returns the byte pattern
+     * matched). The byte pattern matched will NOT be copied to the output and will be un-read from the input.
      *
      * @param in the source to read bytes from
      * @param out the destination to write bytes to
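
To tie the StreamUtils Javadoc above together, a short usage sketch of copy(in, out, numBytes) and fillBuffer(in, buffer), with the parameter types inferred from the Javadoc:

import org.apache.nifi.stream.io.StreamUtils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class StreamUtilsExample {
    public static void main(String[] args) throws IOException {
        final byte[] data = "0123456789".getBytes();

        // Copy exactly 4 bytes; an EOFException would be thrown if fewer were available.
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        StreamUtils.copy(new ByteArrayInputStream(data), out, 4);
        System.out.println(out.toString()); // 0123

        // Fill a fixed-size buffer completely from the stream (throws EOFException if it cannot).
        final byte[] header = new byte[4];
        StreamUtils.fillBuffer(new ByteArrayInputStream(data), header);
        System.out.println(new String(header)); // 0123
    }
}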

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ZipOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ZipOutputStream.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ZipOutputStream.java
index 2b9050d..d30af76 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ZipOutputStream.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/ZipOutputStream.java
@@ -19,9 +19,8 @@ package org.apache.nifi.stream.io;
 import java.io.OutputStream;
 
 /**
- * This class extends the {@link java.util.zip.ZipOutputStream} by providing a
- * constructor that allows the user to specify the compression level. The
- * default compression level is 1, as opposed to Java's default of 5.
+ * This class extends the {@link java.util.zip.ZipOutputStream} by providing a constructor that allows the user to specify the compression level. The default compression level is 1, as opposed to
+ * Java's default of 5.
  */
 public class ZipOutputStream extends java.util.zip.ZipOutputStream {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/EscapeUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/EscapeUtils.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/EscapeUtils.java
index b7a663c..46739e3 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/EscapeUtils.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/EscapeUtils.java
@@ -19,9 +19,7 @@ package org.apache.nifi.util;
 public class EscapeUtils {
 
     /**
-     * Escapes the specified html by replacing &amp;, &lt;, &gt;, &quot;, &#39;,
-     * &#x2f; with their corresponding html entity. If html is null, null is
-     * returned.
+     * Escapes the specified html by replacing &amp;, &lt;, &gt;, &quot;, &#39;, &#x2f; with their corresponding html entity. If html is null, null is returned.
      *
      * @param html to escape
      * @return escaped html
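
A quick sketch of the HTML escaping described above; the method name escapeHtml is an assumption, since the diff shows only the Javadoc:

import org.apache.nifi.util.EscapeUtils;

public class EscapeExample {
    public static void main(String[] args) {
        // escapeHtml(String) is assumed to be the method this Javadoc documents.
        System.out.println(EscapeUtils.escapeHtml("<a href=\"x\">Bob & Alice</a>"));
        // Expected output along the lines of:
        // &lt;a href=&quot;x&quot;&gt;Bob &amp; Alice&lt;&#x2f;a&gt;
    }
}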

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/LongHolder.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/LongHolder.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/LongHolder.java
index ef70ce8..fa2d063 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/LongHolder.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/LongHolder.java
@@ -17,10 +17,8 @@
 package org.apache.nifi.util;
 
 /**
- * Wraps a Long value so that it can be declared <code>final</code> and still be
- * accessed from which inner classes; the functionality is similar to that of an
- * AtomicLong, but operations on this class are not atomic. This results in
- * greater performance when the atomicity is not needed.
+ * Wraps a Long value so that it can be declared <code>final</code> and still be accessed from within inner classes; the functionality is similar to that of an AtomicLong, but operations on this class
+ * are not atomic. This results in greater performance when the atomicity is not needed.
  */
 public class LongHolder extends ObjectHolder<Long> {
 

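The motivation above, a mutable long behind a final reference that is cheaper than AtomicLong when atomicity is unnecessary, tends to be used like the sketch below. The names are invented for illustration and assume single-threaded access:

    // Illustrative stand-in for the idea: a non-atomic, mutable long holder.
    class MutableLong {
        private long value;

        long get()                 { return value; }
        void set(final long v)     { value = v; }
        long add(final long delta) { value += delta; return value; }
    }

    class ByteCounterExample {
        public static void main(final String[] args) {
            final MutableLong bytesCopied = new MutableLong();        // final, yet updatable from inner classes/lambdas
            final Runnable onBufferCopied = () -> bytesCopied.add(1024);
            onBufferCopied.run();
            onBufferCopied.run();
            System.out.println(bytesCopied.get());                    // prints: 2048
        }
    }
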
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/NaiveSearchRingBuffer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/NaiveSearchRingBuffer.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/NaiveSearchRingBuffer.java
index b7831de..0c6c575 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/NaiveSearchRingBuffer.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/NaiveSearchRingBuffer.java
@@ -24,9 +24,8 @@ import java.util.Arrays;
  * </p>
  *
  * <p>
- * This class implements an efficient naive search algorithm, which allows the
- * user of the library to identify byte sequences in a stream on-the-fly so that
- * the stream can be segmented without having to buffer the data.
+ * This class implements an efficient naive search algorithm, which allows the user of the library to identify byte sequences in a stream on-the-fly so that the stream can be segmented without having
+ * to buffer the data.
  * </p>
  *
  * <p>
@@ -60,10 +59,8 @@ public class NaiveSearchRingBuffer {
     }
 
     /**
-     * @return the contents of the internal buffer, which represents the last X
-     * bytes added to the buffer, where X is the minimum of the number of bytes
-     * added to the buffer or the length of the byte sequence for which we are
-     * looking
+     * @return the contents of the internal buffer, which represents the last X bytes added to the buffer, where X is the minimum of the number of bytes added to the buffer or the length of the byte
+     * sequence for which we are looking
      */
     public byte[] getBufferContents() {
         final int contentLength = Math.min(lookingFor.length, bufferSize);
@@ -83,9 +80,7 @@ public class NaiveSearchRingBuffer {
     }
 
     /**
-     * @return <code>true</code> if the number of bytes that have been added to
-     * the buffer is at least equal to the length of the byte sequence for which
-     * we are searching
+     * @return <code>true</code> if the number of bytes that have been added to the buffer is at least equal to the length of the byte sequence for which we are searching
      */
     public boolean isFilled() {
         return bufferSize >= buffer.length;
@@ -101,12 +96,10 @@ public class NaiveSearchRingBuffer {
     }
 
     /**
-     * Add the given byte to the buffer and notify whether or not the byte
-     * completes the desired byte sequence.
+     * Add the given byte to the buffer and notify whether or not the byte completes the desired byte sequence.
      *
      * @param data the data to add to the buffer
-     * @return <code>true</code> if this byte completes the byte sequence,
-     * <code>false</code> otherwise.
+     * @return <code>true</code> if this byte completes the byte sequence, <code>false</code> otherwise.
      */
     public boolean addAndCompare(final byte data) {
         buffer[insertionPointer] = data;

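Given the addAndCompare contract documented above (true once the added byte completes the target sequence), a stream scan typically looks like the sketch below. It assumes the buffer is constructed with the byte sequence to search for; treat the surrounding code as illustrative:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.util.NaiveSearchRingBuffer;

    // Sketch: find where a delimiter ends in a stream without buffering the whole stream.
    public class DelimiterScanSketch {
        public static void main(final String[] args) throws IOException {
            final byte[] delimiter = "\r\n\r\n".getBytes(StandardCharsets.US_ASCII);
            final NaiveSearchRingBuffer search = new NaiveSearchRingBuffer(delimiter);   // assumed constructor

            final InputStream in = new ByteArrayInputStream("headers\r\n\r\nbody".getBytes(StandardCharsets.US_ASCII));
            long offset = 0;
            int b;
            while ((b = in.read()) != -1) {
                offset++;
                if (search.addAndCompare((byte) b)) {    // true when this byte completes the delimiter
                    System.out.println("delimiter ends at byte " + offset);
                    break;
                }
            }
        }
    }
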
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/RingBuffer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/RingBuffer.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/RingBuffer.java
index b46bae5..8cadc01 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/RingBuffer.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/RingBuffer.java
@@ -42,8 +42,7 @@ public class RingBuffer<T> {
     }
 
     /**
-     * Adds the given value to the RingBuffer and returns the value that was
-     * removed in order to make room.
+     * Adds the given value to the RingBuffer and returns the value that was removed in order to make room.
      *
      * @param value the new value to add
      * @return value previously in the buffer
@@ -202,11 +201,8 @@ public class RingBuffer<T> {
     }
 
     /**
-     * Iterates over each element in the RingBuffer, calling the
-     * {@link ForEachEvaluator#evaluate(Object) evaluate} method on each element
-     * in the RingBuffer. If the Evaluator returns {@code false}, the method
-     * will skip all remaining elements in the RingBuffer; otherwise, the next
-     * element will be evaluated until all elements have been evaluated.
+     * Iterates over each element in the RingBuffer, calling the {@link ForEachEvaluator#evaluate(Object) evaluate} method on each element in the RingBuffer. If the Evaluator returns {@code false},
+     * the method will skip all remaining elements in the RingBuffer; otherwise, the next element will be evaluated until all elements have been evaluated.
      *
      * @param evaluator used to evaluate each item in the ring buffer
      */
@@ -215,15 +211,11 @@ public class RingBuffer<T> {
     }
 
     /**
-     * Iterates over each element in the RingBuffer, calling the
-     * {@link ForEachEvaluator#evaluate(Object) evaluate} method on each element
-     * in the RingBuffer. If the Evaluator returns {@code false}, the method
-     * will skip all remaining elements in the RingBuffer; otherwise, the next
-     * element will be evaluated until all elements have been evaluated.
+     * Iterates over each element in the RingBuffer, calling the {@link ForEachEvaluator#evaluate(Object) evaluate} method on each element in the RingBuffer. If the Evaluator returns {@code false},
+     * the method will skip all remaining elements in the RingBuffer; otherwise, the next element will be evaluated until all elements have been evaluated.
      *
      * @param evaluator the evaluator
-     * @param iterationDirection the order in which to iterate over the elements
-     * in the RingBuffer
+     * @param iterationDirection the order in which to iterate over the elements in the RingBuffer
      */
     public void forEach(final ForEachEvaluator<T> evaluator, final IterationDirection iterationDirection) {
         readLock.lock();
@@ -266,16 +258,14 @@ public class RingBuffer<T> {
     }
 
     /**
-     * Defines an interface that can be used to iterate over all of the elements
-     * in the RingBuffer via the {@link #forEach} method
+     * Defines an interface that can be used to iterate over all of the elements in the RingBuffer via the {@link #forEach} method
      *
      * @param <S> the type to evaluate
      */
     public static interface ForEachEvaluator<S> {
 
         /**
-         * Evaluates the given element and returns {@code true} if the next
-         * element should be evaluated, {@code false} otherwise
+         * Evaluates the given element and returns {@code true} if the next element should be evaluated, {@code false} otherwise
          *
          * @param value the value to evaluate
          * @return true if should continue evaluating; false otherwise

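The forEach/ForEachEvaluator contract described above lends itself to early-exit scans over recently retained values; the sketch below assumes a capacity-taking constructor and an add(T) method behaving as documented, and is illustrative only:

    import org.apache.nifi.util.RingBuffer;

    // Sketch: retain only the most recent 100 samples and stop iterating early.
    public class RecentSamplesSketch {
        public static void main(final String[] args) {
            final RingBuffer<Long> latencies = new RingBuffer<>(100);   // assumed capacity constructor
            for (long i = 1; i <= 150; i++) {
                latencies.add(i);                                       // older values are evicted to make room
            }

            latencies.forEach(value -> {
                System.out.println(value);
                return value < 140;                                     // returning false skips the remaining elements
            });
        }
    }
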
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/525ce7fb/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/StopWatch.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/StopWatch.java b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/StopWatch.java
index bc8ab75..cffe49c 100644
--- a/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/StopWatch.java
+++ b/nifi/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/StopWatch.java
@@ -58,8 +58,7 @@ public final class StopWatch {
      * @param timeUnit the unit for which the duration should be reported
      * @return the duration of the stopwatch in the specified unit
      *
-     * @throws IllegalStateException if the StopWatch has not been stopped via
-     * {@link #stop()}
+     * @throws IllegalStateException if the StopWatch has not been stopped via {@link #stop()}
      */
     public long getDuration(final TimeUnit timeUnit) {
         if (duration < 0) {

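In practice the contract above means the watch must be stopped before the duration is read; a brief usage sketch follows (the boolean "start immediately" constructor is an assumption here):

    import java.util.concurrent.TimeUnit;

    import org.apache.nifi.util.StopWatch;

    // Sketch: time a unit of work; reading the duration before stop() would throw IllegalStateException.
    public class StopWatchSketch {
        public static void main(final String[] args) throws InterruptedException {
            final StopWatch watch = new StopWatch(true);    // assumption: 'true' starts the watch immediately
            Thread.sleep(50);                               // stand-in for the work being timed
            watch.stop();
            System.out.println(watch.getDuration(TimeUnit.MILLISECONDS) + " ms");
        }
    }
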

[24/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImpl.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImpl.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImpl.java
index 81bb7a7..32181e3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImpl.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImpl.java
@@ -59,21 +59,14 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * An implementation of the <code>HttpRequestReplicator</code> interface. This
- * implementation parallelizes the node HTTP requests using the given
- * <code>ExecutorService</code> instance. Individual requests may have
- * connection and read timeouts set, which may be set during instance
- * construction. Otherwise, the default is not to timeout.
+ * An implementation of the <code>HttpRequestReplicator</code> interface. This implementation parallelizes the node HTTP requests using the given <code>ExecutorService</code> instance. Individual
+ * requests may have connection and read timeouts set, which may be set during instance construction. Otherwise, the default is not to timeout.
  *
- * If a node protocol scheme is provided during construction, then all requests
- * will be replicated using the given scheme. If null is provided as the scheme
- * (the default), then the requests will be replicated using the scheme of the
- * original URI.
+ * If a node protocol scheme is provided during construction, then all requests will be replicated using the given scheme. If null is provided as the scheme (the default), then the requests will be
+ * replicated using the scheme of the original URI.
  *
- * Clients must call start() and stop() to initialize and shutdown the instance.
- * The instance must be started before issuing any replication requests.
+ * Clients must call start() and stop() to initialize and shutdown the instance. The instance must be started before issuing any replication requests.
  *
- * @author unattributed
  */
 public class HttpRequestReplicatorImpl implements HttpRequestReplicator {
 
@@ -97,11 +90,9 @@ public class HttpRequestReplicatorImpl implements HttpRequestReplicator {
     private String nodeProtocolScheme = null;
 
     /**
-     * Creates an instance. The connection timeout and read timeout will be
-     * infinite.
+     * Creates an instance. The connection timeout and read timeout will be infinite.
      *
-     * @param numThreads the number of threads to use when parallelizing
-     * requests
+     * @param numThreads the number of threads to use when parallelizing requests
      * @param client a client for making requests
      */
     public HttpRequestReplicatorImpl(final int numThreads, final Client client) {
@@ -111,12 +102,10 @@ public class HttpRequestReplicatorImpl implements HttpRequestReplicator {
     /**
      * Creates an instance.
      *
-     * @param numThreads the number of threads to use when parallelizing
-     * requests
+     * @param numThreads the number of threads to use when parallelizing requests
      * @param client a client for making requests
-     * @param connectionTimeoutMs the connection timeout specified in
-     * milliseconds
-     * @param readTimeoutMs the read timeout specified in milliseconds
+     * @param connectionTimeout the connection timeout specified in milliseconds
+     * @param readTimeout the read timeout specified in milliseconds
      */
     public HttpRequestReplicatorImpl(final int numThreads, final Client client, final String connectionTimeout, final String readTimeout) {
 
@@ -178,9 +167,7 @@ public class HttpRequestReplicatorImpl implements HttpRequestReplicator {
     /**
      * Sets the protocol scheme to use when issuing requests to nodes.
      *
-     * @param nodeProtocolScheme the scheme. Valid values are "http", "https",
-     * or null. If null is specified, then the scheme of the originating request
-     * is used when replicating that request.
+     * @param nodeProtocolScheme the scheme. Valid values are "http", "https", or null. If null is specified, then the scheme of the originating request is used when replicating that request.
      */
     public synchronized void setNodeProtocolScheme(final String nodeProtocolScheme) {
         if (StringUtils.isNotBlank(nodeProtocolScheme)) {
@@ -368,9 +355,7 @@ public class HttpRequestReplicatorImpl implements HttpRequestReplicator {
     }
 
     /**
-     * Wraps a future node response with info from originating request. This
-     * coupling allows for futures that encountered exceptions to be linked back
-     * to the failing node and better reported.
+     * Wraps a future node response with info from originating request. This coupling allows for futures that encountered exceptions to be linked back to the failing node and better reported.
      */
     private class NodeHttpRequestFutureWrapper {
 
@@ -417,8 +402,7 @@ public class HttpRequestReplicatorImpl implements HttpRequestReplicator {
     }
 
     /**
-     * A Callable for making an HTTP request to a single node and returning its
-     * response.
+     * A Callable for making an HTTP request to a single node and returning its response.
      */
     private class NodeHttpRequestCallable implements Callable<NodeResponse> {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImpl.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImpl.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImpl.java
index afade7e..de274b1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImpl.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImpl.java
@@ -33,12 +33,9 @@ import org.slf4j.LoggerFactory;
  *
  * The algorithm is as follows.
  *
- * If any HTTP responses were 2XX, then disconnect non-2XX responses. This is
- * because 2XX may have changed a node's flow.
+ * If any HTTP responses were 2XX, then disconnect non-2XX responses. This is because 2XX may have changed a node's flow.
  *
- * If no 2XX responses were received, then the node's flow has not changed.
- * Instead of disconnecting everything, we only disconnect the nodes with
- * internal errors, i.e., 5XX responses.
+ * If no 2XX responses were received, then the node's flow has not changed. Instead of disconnecting everything, we only disconnect the nodes with internal errors, i.e., 5XX responses.
  *
  * @author unattributed
  */
@@ -60,7 +57,7 @@ public class HttpResponseMapperImpl implements HttpResponseMapper {
             }
         }
 
-        // determine the status of each node 
+        // determine the status of each node
         for (final NodeResponse nodeResponse : nodeResponses) {
 
             final Node.Status status;

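The mapping algorithm described above reduces to a small per-node decision rule; the sketch below is a hypothetical restatement of that rule, not the actual HttpResponseMapperImpl code:

    import java.util.Arrays;
    import java.util.List;

    // Hypothetical restatement of the documented rule: if any node returned 2XX,
    // disconnect the non-2XX nodes; otherwise disconnect only the 5XX nodes.
    public class ResponseMappingSketch {

        enum Status { CONNECTED, DISCONNECTED }

        static Status mapStatus(final int statusCode, final boolean anyNodeReturned2xx) {
            final boolean is2xx = statusCode / 100 == 2;
            final boolean is5xx = statusCode / 100 == 5;
            if (anyNodeReturned2xx) {
                return is2xx ? Status.CONNECTED : Status.DISCONNECTED;   // a 2XX elsewhere may have changed that node's flow
            }
            return is5xx ? Status.DISCONNECTED : Status.CONNECTED;       // no flow change: only internal errors disconnect
        }

        public static void main(final String[] args) {
            final List<Integer> codes = Arrays.asList(200, 409, 500);
            final boolean any2xx = codes.stream().anyMatch(c -> c / 100 == 2);
            for (final int code : codes) {
                System.out.println(code + " -> " + mapStatus(code, any2xx));
            }
            // 200 -> CONNECTED, 409 -> DISCONNECTED, 500 -> DISCONNECTED
        }
    }
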
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/WebClusterManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/WebClusterManager.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/WebClusterManager.java
index 2cf026a..d3a24e7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/WebClusterManager.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/impl/WebClusterManager.java
@@ -232,20 +232,14 @@ import org.apache.nifi.web.api.entity.ReportingTaskEntity;
 import org.apache.nifi.web.api.entity.ReportingTasksEntity;
 
 /**
- * Provides a cluster manager implementation. The manager federates incoming
- * HTTP client requests to the nodes' external API using the HTTP protocol. The
- * manager also communicates with nodes using the nodes' internal socket
- * protocol.
+ * Provides a cluster manager implementation. The manager federates incoming HTTP client requests to the nodes' external API using the HTTP protocol. The manager also communicates with nodes using the
+ * nodes' internal socket protocol.
  *
- * The manager's socket address may broadcasted using multicast if a
- * MulticastServiceBroadcaster instance is set on this instance. The manager
- * instance must be started after setting the broadcaster.
+ * The manager's socket address may be broadcast using multicast if a MulticastServiceBroadcaster instance is set on this instance. The manager instance must be started after setting the broadcaster.
  *
- * The manager may be configured with an EventManager for recording noteworthy
- * lifecycle events (e.g., first heartbeat received, node status change).
+ * The manager may be configured with an EventManager for recording noteworthy lifecycle events (e.g., first heartbeat received, node status change).
  *
- * The start() and stop() methods must be called to initialize and stop the
- * instance.
+ * The start() and stop() methods must be called to initialize and stop the instance.
  *
  * @author unattributed
  */
@@ -258,47 +252,38 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     private static final Logger heartbeatLogger = new NiFiLog(LoggerFactory.getLogger("org.apache.nifi.cluster.heartbeat"));
 
     /**
-     * The HTTP header to store a cluster context. An example of what may be
-     * stored in the context is a node's auditable actions in response to a
-     * cluster request. The cluster context is serialized using Java's
-     * serialization mechanism and hex encoded.
+     * The HTTP header to store a cluster context. An example of what may be stored in the context is a node's auditable actions in response to a cluster request. The cluster context is serialized
+     * using Java's serialization mechanism and hex encoded.
      */
     public static final String CLUSTER_CONTEXT_HTTP_HEADER = "X-ClusterContext";
 
     /**
-     * HTTP Header that stores a unique ID for each request that is replicated
-     * to the nodes. This is used for logging purposes so that request
-     * information, such as timing, can be correlated between the NCM and the
-     * nodes
+     * HTTP Header that stores a unique ID for each request that is replicated to the nodes. This is used for logging purposes so that request information, such as timing, can be correlated between
+     * the NCM and the nodes
      */
     public static final String REQUEST_ID_HEADER = "X-RequestID";
 
     /**
-     * The HTTP header that the NCM specifies to ask a node if they are able to
-     * process a given request. The value is always 150-NodeContinue. The node
-     * will respond with 150 CONTINUE if it is able to process the request, 417
-     * EXPECTATION_FAILED otherwise.
+     * The HTTP header that the NCM specifies to ask a node if they are able to process a given request. The value is always 150-NodeContinue. The node will respond with 150 CONTINUE if it is able to
+     * process the request, 417 EXPECTATION_FAILED otherwise.
      */
     public static final String NCM_EXPECTS_HTTP_HEADER = "X-NcmExpects";
     public static final int NODE_CONTINUE_STATUS_CODE = 150;
 
     /**
-     * The HTTP header that the NCM specifies to indicate that a node should
-     * invalidate the specified user group. This is done to ensure that user
-     * cache is not stale when an administrator modifies a group through the UI.
+     * The HTTP header that the NCM specifies to indicate that a node should invalidate the specified user group. This is done to ensure that user cache is not stale when an administrator modifies a
+     * group through the UI.
      */
     public static final String CLUSTER_INVALIDATE_USER_GROUP_HEADER = "X-ClusterInvalidateUserGroup";
 
     /**
-     * The HTTP header that the NCM specifies to indicate that a node should
-     * invalidate the specified user. This is done to ensure that user cache is
-     * not stale when an administrator modifies a user through the UI.
+     * The HTTP header that the NCM specifies to indicate that a node should invalidate the specified user. This is done to ensure that user cache is not stale when an administrator modifies a user
+     * through the UI.
      */
     public static final String CLUSTER_INVALIDATE_USER_HEADER = "X-ClusterInvalidateUser";
 
     /**
-     * The default number of seconds to respond to a connecting node if the
-     * manager cannot provide it with a current data flow.
+     * The default number of seconds to respond to a connecting node if the manager cannot provide it with a current data flow.
      */
     private static final int DEFAULT_CONNECTION_REQUEST_TRY_AGAIN_SECONDS = 5;
 
@@ -398,7 +383,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
             snapshotMillis = FormatUtils.getTimeDuration(NiFiProperties.DEFAULT_COMPONENT_STATUS_SNAPSHOT_FREQUENCY, TimeUnit.MILLISECONDS);
         }
         componentStatusSnapshotMillis = snapshotMillis;
-        
+
         Executors.newSingleThreadScheduledExecutor().scheduleWithFixedDelay(new Runnable() {
             @Override
             public void run() {
@@ -411,7 +396,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
                                 statusRepository = createComponentStatusRepository();
                                 componentMetricsRepositoryMap.put(node.getNodeId(), statusRepository);
                             }
-                            
+
                             // ensure this node has a payload
                             if (node.getHeartbeat() != null && node.getHeartbeatPayload() != null) {
                                 // if nothing has been captured or the current heartbeat is newer, capture it - comparing the heartbeat created timestamp
@@ -422,7 +407,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
                             }
                         }
                     }
-                } catch(final Throwable t) {
+                } catch (final Throwable t) {
                     logger.warn("Unable to capture component metrics from Node heartbeats: " + t);
                     if (logger.isDebugEnabled()) {
                         logger.warn("", t);
@@ -667,14 +652,10 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Services connection requests. If the data flow management service is
-     * unable to provide a current copy of the data flow, then the returned
-     * connection response will indicate the node should try later. Otherwise,
-     * the connection response will contain the the flow and the node
-     * identifier.
+     * Services connection requests. If the data flow management service is unable to provide a current copy of the data flow, then the returned connection response will indicate the node should try
+     * later. Otherwise, the connection response will contain the flow and the node identifier.
      *
-     * If this instance is configured with a firewall and the request is
-     * blocked, then the response will not contain a node identifier.
+     * If this instance is configured with a firewall and the request is blocked, then the response will not contain a node identifier.
      *
      * @param request a connection request
      *
@@ -754,14 +735,14 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
             }
 
             /*
-             * The manager does not have a current copy of the data flow, 
-             * so it will instruct the node to try connecting at a later 
-             * time.  Meanwhile, the flow will be locked down from user 
+             * The manager does not have a current copy of the data flow,
+             * so it will instruct the node to try connecting at a later
+             * time.  Meanwhile, the flow will be locked down from user
              * changes because the node is marked as connecting.
              */
 
             /*
-             * Create try-later response based on flow retrieval delay to give 
+             * Create try-later response based on flow retrieval delay to give
              * the flow management service a chance to retrieve a current flow
              */
             final int tryAgainSeconds;
@@ -783,20 +764,14 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Services reconnection requests for a given node. If the node indicates
-     * reconnection failure, then the node will be set to disconnected and if
-     * the node has primary role, then the role will be revoked. Otherwise, a
-     * reconnection request will be sent to the node, initiating the connection
-     * handshake.
+     * Services reconnection requests for a given node. If the node indicates reconnection failure, then the node will be set to disconnected and if the node has primary role, then the role will be
+     * revoked. Otherwise, a reconnection request will be sent to the node, initiating the connection handshake.
      *
      * @param nodeId a node identifier
      *
      * @throws UnknownNodeException if the node does not exist
-     * @throws IllegalNodeReconnectionException if the node cannot be
-     * reconnected because the node is not disconnected
-     * @throws NodeReconnectionException if the reconnection message failed to
-     * be sent or the cluster could not provide a current data flow for the
-     * reconnection request
+     * @throws IllegalNodeReconnectionException if the node cannot be reconnected because the node is not disconnected
+     * @throws NodeReconnectionException if the reconnection message failed to be sent or the cluster could not provide a current data flow for the reconnection request
      */
     @Override
     public void requestReconnection(final String nodeId, final String userDn) throws UnknownNodeException, IllegalNodeReconnectionException {
@@ -1163,11 +1138,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
      * @param userDn the DN of the user requesting the disconnection
      *
      * @throws UnknownNodeException if the node does not exist
-     * @throws IllegalNodeDisconnectionException if the node cannot be
-     * disconnected due to the cluster's state (e.g., node is last connected
-     * node or node is primary)
-     * @throws NodeDisconnectionException if the disconnection message fails to
-     * be sent.
+     * @throws IllegalNodeDisconnectionException if the node cannot be disconnected due to the cluster's state (e.g., node is last connected node or node is primary)
+     * @throws NodeDisconnectionException if the disconnection message fails to be sent.
      */
     @Override
     public void requestDisconnection(final String nodeId, final String userDn) throws UnknownNodeException, IllegalNodeDisconnectionException, NodeDisconnectionException {
@@ -1185,8 +1157,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Requests a disconnection to the node with the given node ID, but any
-     * exception thrown is suppressed.
+     * Requests a disconnection to the node with the given node ID, but any exception thrown is suppressed.
      *
      * @param nodeId the node ID
      */
@@ -1197,28 +1168,19 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Issues a disconnection message to the node identified by the given node
-     * ID. If the node is not known, then a UnknownNodeException is thrown. If
-     * the node cannot be disconnected due to the cluster's state and
-     * ignoreLastNodeCheck is false, then a IllegalNodeDisconnectionException is
-     * thrown. Otherwise, a disconnection message is issued to the node.
+     * Issues a disconnection message to the node identified by the given node ID. If the node is not known, then an UnknownNodeException is thrown. If the node cannot be disconnected due to the
+     * cluster's state and ignoreLastNodeCheck is false, then an IllegalNodeDisconnectionException is thrown. Otherwise, a disconnection message is issued to the node.
      *
-     * Whether the disconnection message is successfully sent to the node, the
-     * node is marked as disconnected and if the node is the primary node, then
-     * the primary role is revoked.
+     * Whether or not the disconnection message is successfully sent to the node, the node is marked as disconnected, and if the node is the primary node, the primary role is revoked.
      *
      * @param nodeId the ID of the node
-     * @param ignoreNodeChecks if false, checks will be made to ensure the
-     * cluster supports the node's disconnection (e.g., the node is not the last
-     * connected node in the cluster; the node is not the primary); otherwise,
-     * the request is made regardless of the cluster state
+     * @param ignoreNodeChecks if false, checks will be made to ensure the cluster supports the node's disconnection (e.g., the node is not the last connected node in the cluster; the node is not the
+     * primary); otherwise, the request is made regardless of the cluster state
      * @param explanation
      *
-     * @throws IllegalNodeDisconnectionException if the node cannot be
-     * disconnected due to the cluster's state (e.g., node is last connected
-     * node or node is primary). Not thrown if ignoreNodeChecks is true.
-     * @throws NodeDisconnectionException if the disconnection message fails to
-     * be sent.
+     * @throws IllegalNodeDisconnectionException if the node cannot be disconnected due to the cluster's state (e.g., node is last connected node or node is primary). Not thrown if ignoreNodeChecks is
+     * true.
+     * @throws NodeDisconnectionException if the disconnection message fails to be sent.
      */
     private void requestDisconnection(final NodeIdentifier nodeId, final boolean ignoreNodeChecks, final String explanation)
             throws IllegalNodeDisconnectionException, NodeDisconnectionException {
@@ -1276,8 +1238,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Messages the node to have the primary role. If the messaging fails, then
-     * the node is marked as disconnected.
+     * Messages the node to have the primary role. If the messaging fails, then the node is marked as disconnected.
      *
      * @param nodeId the node ID to assign primary role
      *
@@ -1292,7 +1253,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
             msg.setPrimary(true);
             logger.info("Attempting to assign primary role to node: " + nodeId);
 
-            // message 
+            // message
             senderListener.assignPrimaryRole(msg);
 
             logger.info("Assigned primary role to node: " + nodeId);
@@ -1321,11 +1282,9 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Messages the node with the given node ID to no longer have the primary
-     * role. If the messaging fails, then the node is marked as disconnected.
+     * Messages the node with the given node ID to no longer have the primary role. If the messaging fails, then the node is marked as disconnected.
      *
-     * @return true if the primary role was revoked from the node; false
-     * otherwise
+     * @return true if the primary role was revoked from the node; false otherwise
      */
     private boolean revokePrimaryRole(final NodeIdentifier nodeId) {
         writeLock.lock();
@@ -1382,8 +1341,10 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
             final Node node = getRawNode(msg.getNodeId().getId());
             if (node != null) {
                 node.setStatus(Status.DISCONNECTED);
-                addEvent(msg.getNodeId(), "Node could not join cluster because it failed to start up properly. Setting node to Disconnected. Node reported the following error: " + msg.getExceptionMessage());
-                addBulletin(node, Severity.ERROR, "Node could not join cluster because it failed to start up properly. Setting node to Disconnected. Node reported the following error: " + msg.getExceptionMessage());
+                addEvent(msg.getNodeId(), "Node could not join cluster because it failed to start up properly. Setting node to Disconnected. Node reported "
+                        + "the following error: " + msg.getExceptionMessage());
+                addBulletin(node, Severity.ERROR, "Node could not join cluster because it failed to start up properly. Setting node to Disconnected. Node "
+                        + "reported the following error: " + msg.getExceptionMessage());
             }
         } finally {
             writeLock.unlock("handleControllerStartupFailure");
@@ -1405,14 +1366,6 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
         }
     }
 
-    /**
-     * Adds an instance of a specified controller service.
-     *
-     * @param type
-     * @param id
-     * @param properties
-     * @return
-     */
     @Override
     public ControllerServiceNode createControllerService(final String type, final String id, final boolean firstTimeAdded) {
         return controllerServiceProvider.createControllerService(type, id, firstTimeAdded);
@@ -1666,7 +1619,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     /**
      * Handle a bulletins message.
      *
-     * @param bulletins
+     * @param bulletins bulletins
      */
     public void handleBulletins(final NodeBulletins bulletins) {
         final NodeIdentifier nodeIdentifier = bulletins.getNodeIdentifier();
@@ -1681,15 +1634,9 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Handles a node's heartbeat. If this heartbeat is a node's first heartbeat
-     * since its connection request, then the manager will mark the node as
-     * connected. If the node was previously disconnected due to a lack of
-     * heartbeat, then a reconnection request is issued. If the node was
-     * disconnected for other reasons, then a disconnection request is issued.
-     * If this instance is configured with a firewall and the heartbeat is
-     * blocked, then a disconnection request is issued.
-     *
-     * @param heartbeat
+     * Handles a node's heartbeat. If this heartbeat is a node's first heartbeat since its connection request, then the manager will mark the node as connected. If the node was previously disconnected
+     * due to a lack of heartbeat, then a reconnection request is issued. If the node was disconnected for other reasons, then a disconnection request is issued. If this instance is configured with a
+     * firewall and the heartbeat is blocked, then a disconnection request is issued.
      */
     @Override
     public void handleHeartbeat(final Heartbeat heartbeat) {
@@ -1703,9 +1650,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
         /*
          * Processing a heartbeat requires a write lock, which may take a while
          * to obtain.  Only the last heartbeat is necessary to process per node.
-         * Futhermore, since many could pile up, heartbeats are processed in 
+         * Furthermore, since many could pile up, heartbeats are processed in
          * bulk.
-         * 
          * The below queue stores the pending heartbeats.
          */
         pendingHeartbeats.add(heartbeat);
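
The queue-and-process-in-bulk strategy in the comment above boils down to coalescing the pending heartbeats to the newest one per node before taking the write lock; a simplified, hypothetical sketch of that pattern (the types are stand-ins, not the NiFi classes):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;

    // Hypothetical sketch of the coalescing pattern: heartbeats queue up cheaply,
    // and only the newest heartbeat per node is processed under the write lock.
    public class HeartbeatCoalescingSketch {

        static class Heartbeat {
            final String nodeId;
            final long createdTimestamp;

            Heartbeat(final String nodeId, final long createdTimestamp) {
                this.nodeId = nodeId;
                this.createdTimestamp = createdTimestamp;
            }
        }

        private final Queue<Heartbeat> pendingHeartbeats = new ConcurrentLinkedQueue<>();

        void handleHeartbeat(final Heartbeat heartbeat) {
            pendingHeartbeats.add(heartbeat);                  // cheap; no lock is needed to enqueue
        }

        void processPendingHeartbeats() {
            final Map<String, Heartbeat> latestPerNode = new HashMap<>();
            Heartbeat hb;
            while ((hb = pendingHeartbeats.poll()) != null) {  // drain everything queued so far
                final Heartbeat previous = latestPerNode.get(hb.nodeId);
                if (previous == null || hb.createdTimestamp > previous.createdTimestamp) {
                    latestPerNode.put(hb.nodeId, hb);          // keep only the newest heartbeat per node
                }
            }
            // acquire the write lock once here and process latestPerNode.values() in bulk
        }
    }
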
@@ -1782,7 +1728,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
                     } else if (heartbeatIndicatesNotYetConnected) {
                         if (Status.CONNECTED == node.getStatus()) {
                             // record event
-                            addEvent(node.getNodeId(), "Received heartbeat from node that thinks it is not yet part of the cluster, though the Manager thought it was. Marking as Disconnected and issuing reconnection request.");
+                            addEvent(node.getNodeId(), "Received heartbeat from node that thinks it is not yet part of the cluster, though the Manager thought it "
+                                    + "was. Marking as Disconnected and issuing reconnection request.");
 
                             // record heartbeat
                             node.setHeartbeat(null);
@@ -1843,7 +1790,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
                         node.setHeartbeat(mostRecentHeartbeat);
                     }
                 } catch (final Exception e) {
-                    logger.error("Failed to process heartbeat from {}:{} due to {}", mostRecentHeartbeat.getNodeIdentifier().getApiAddress(), mostRecentHeartbeat.getNodeIdentifier().getApiPort(), e.toString());
+                    logger.error("Failed to process heartbeat from {}:{} due to {}",
+                            mostRecentHeartbeat.getNodeIdentifier().getApiAddress(), mostRecentHeartbeat.getNodeIdentifier().getApiPort(), e.toString());
                     if (logger.isDebugEnabled()) {
                         logger.error("", e);
                     }
@@ -2043,13 +1991,15 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
 
     @Override
     public NodeResponse applyRequest(final String method, final URI uri, final Map<String, List<String>> parameters, final Map<String, String> headers)
-            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException, ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
+            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException,
+            ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
         return applyRequest(method, uri, parameters, headers, getNodeIds(Status.CONNECTED));
     }
 
     @Override
     public NodeResponse applyRequest(final String method, final URI uri, final Map<String, List<String>> parameters, final Map<String, String> headers, final Set<NodeIdentifier> nodeIdentifiers)
-            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException, ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
+            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException,
+            ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
 
         final boolean mutableRequest = canChangeNodeState(method, uri);
         final ClusterManagerLock lock = mutableRequest ? writeLock : readLock;
@@ -2085,13 +2035,15 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
 
     @Override
     public NodeResponse applyRequest(final String method, final URI uri, final Object entity, final Map<String, String> headers)
-            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException, ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
+            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException,
+            ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
         return applyRequest(method, uri, entity, headers, getNodeIds(Status.CONNECTED));
     }
 
     @Override
     public NodeResponse applyRequest(final String method, final URI uri, final Object entity, final Map<String, String> headers, final Set<NodeIdentifier> nodeIdentifiers)
-            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException, ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
+            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException,
+            ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
 
         final boolean mutableRequest = canChangeNodeState(method, uri);
         final ClusterManagerLock lock = mutableRequest ? writeLock : readLock;
@@ -2270,7 +2222,9 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     // requires write lock to already be acquired unless method cannot change node state
-    private NodeResponse federateRequest(final String method, final URI uri, final Map<String, List<String>> parameters, final Object entity, final Map<String, String> headers, final Set<NodeIdentifier> nodeIds) throws UriConstructionException {
+    private NodeResponse federateRequest(
+            final String method, final URI uri, final Map<String, List<String>> parameters, final Object entity, final Map<String, String> headers, final Set<NodeIdentifier> nodeIds)
+            throws UriConstructionException {
         // ensure some nodes are connected
         if (nodeIds.isEmpty()) {
             throw new NoConnectedNodesException("Cannot apply " + method + " request to " + uri + " because there are currently no connected Nodes");
@@ -2399,7 +2353,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     private static boolean isProcessorEndpoint(final URI uri, final String method) {
-        if (("GET".equalsIgnoreCase(method) || "PUT".equalsIgnoreCase(method)) && (PROCESSOR_URI_PATTERN.matcher(uri.getPath()).matches() || CLUSTER_PROCESSOR_URI_PATTERN.matcher(uri.getPath()).matches())) {
+        if (("GET".equalsIgnoreCase(method) || "PUT".equalsIgnoreCase(method))
+                && (PROCESSOR_URI_PATTERN.matcher(uri.getPath()).matches() || CLUSTER_PROCESSOR_URI_PATTERN.matcher(uri.getPath()).matches())) {
             return true;
         } else if ("POST".equalsIgnoreCase(method) && PROCESSORS_URI_PATTERN.matcher(uri.getPath()).matches()) {
             return true;
@@ -2674,7 +2629,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
         }
     }
 
-    private void mergeControllerServiceReferences(final Set<ControllerServiceReferencingComponentDTO> referencingComponents, final Map<NodeIdentifier, Set<ControllerServiceReferencingComponentDTO>> referencingComponentMap) {
+    private void mergeControllerServiceReferences(
+            final Set<ControllerServiceReferencingComponentDTO> referencingComponents, final Map<NodeIdentifier, Set<ControllerServiceReferencingComponentDTO>> referencingComponentMap) {
         final Map<String, Integer> activeThreadCounts = new HashMap<>();
         final Map<String, String> states = new HashMap<>();
         for (final Map.Entry<NodeIdentifier, Set<ControllerServiceReferencingComponentDTO>> nodeEntry : referencingComponentMap.entrySet()) {
@@ -2782,12 +2738,11 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Merges the validation errors into the specified map, recording the
-     * corresponding node identifier.
+     * Merges the validation errors into the specified map, recording the corresponding node identifier.
      *
-     * @param validationErrorMap
-     * @param nodeId
-     * @param nodeValidationErrors
+     * @param validationErrorMap map
+     * @param nodeId id
+     * @param nodeValidationErrors errors
      */
     public void mergeValidationErrors(final Map<String, Set<NodeIdentifier>> validationErrorMap, final NodeIdentifier nodeId, final Collection<String> nodeValidationErrors) {
         if (nodeValidationErrors != null) {
@@ -2803,12 +2758,11 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Normalizes the validation errors by prepending the corresponding nodes
-     * when the error does not exist across all nodes.
+     * Normalizes the validation errors by prepending the corresponding nodes when the error does not exist across all nodes.
      *
-     * @param validationErrorMap
-     * @param totalNodes
-     * @return
+     * @param validationErrorMap map
+     * @param totalNodes total
+     * @return normalized errors
      */
     public Set<String> normalizedMergedValidationErrors(final Map<String, Set<NodeIdentifier>> validationErrorMap, int totalNodes) {
         final Set<String> normalizedValidationErrors = new HashSet<>();
@@ -2862,7 +2816,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
         final boolean hasClientResponse = clientResponse != null;
         final boolean hasSuccessfulClientResponse = hasClientResponse && clientResponse.is2xx();
 
-        // drain the responses from the socket for those responses not being sent to the client 
+        // drain the responses from the socket for those responses not being sent to the client
         final Set<NodeResponse> nodeResponsesToDrain = new HashSet<>(updatedNodesMap.values());
         nodeResponsesToDrain.remove(clientResponse);
 
@@ -3177,7 +3131,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
                     continue;
                 }
 
-                final ControllerServiceReferencingComponentsEntity nodeResponseEntity = (nodeResponse == clientResponse) ? responseEntity : nodeResponse.getClientResponse().getEntity(ControllerServiceReferencingComponentsEntity.class);
+                final ControllerServiceReferencingComponentsEntity nodeResponseEntity =
+                        (nodeResponse == clientResponse) ? responseEntity : nodeResponse.getClientResponse().getEntity(ControllerServiceReferencingComponentsEntity.class);
                 final Set<ControllerServiceReferencingComponentDTO> nodeReferencingComponents = nodeResponseEntity.getControllerServiceReferencingComponents();
 
                 resultsMap.put(nodeResponse.getNodeId(), nodeReferencingComponents);
@@ -3243,9 +3198,9 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
         }
 
         /*
-         * Nodes that encountered issues handling the request are marked as 
-         * disconnected for mutable requests (e.g., post, put, delete).  For 
-         * other requests (e.g., get, head), the nodes remain in their current 
+         * Nodes that encountered issues handling the request are marked as
+         * disconnected for mutable requests (e.g., post, put, delete).  For
+         * other requests (e.g., get, head), the nodes remain in their current
          * state even if they had problems handling the request.
          */
         if (mutableRequest) {
@@ -3260,7 +3215,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
             // mark flow as stale since this request could have changed the flow
             notifyDataFlowManagmentServiceOfFlowStateChange(PersistedFlowState.STALE);
 
-            // disconnect problematic nodes 
+            // disconnect problematic nodes
             if (!problematicNodeResponses.isEmpty()) {
                 if (problematicNodeResponses.size() < nodeResponses.size()) {
                     logger.warn(String.format("One or more nodes failed to process URI '%s'.  Requesting each node to disconnect from cluster.", uri));
@@ -3275,8 +3230,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Drains the node responses off of the socket to ensure that the socket is
-     * appropriately cleaned-up.
+     * Drains the node responses off of the socket to ensure that the socket is appropriately cleaned-up.
      *
      * @param nodeResponses the collection of node responses
      */
@@ -3313,11 +3267,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * A helper method to disconnect nodes that returned unsuccessful HTTP
-     * responses because of a replicated request. Disconnection requests are
-     * sent concurrently.
+     * A helper method to disconnect nodes that returned unsuccessful HTTP responses because of a replicated request. Disconnection requests are sent concurrently.
      *
-     * @param nodeResponses
      */
     private void disconnectNodes(final Set<NodeResponse> nodeResponses, final String explanation) {
         // return fast if nothing to do
@@ -3363,14 +3314,11 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Returns false if an internal protocol message was received by a node
-     * listed in the firewall. If no firewall is configured, then false is
-     * always returned.
+     * Returns false if an internal protocol message was received by a node listed in the firewall. If no firewall is configured, then false is always returned.
      *
      * @param ip the IP of the remote machine
      *
-     * @return false if the IP is listed in the firewall or if the firewall is
-     * not configured; true otherwise
+     * @return false if the IP is listed in the firewall or if the firewall is not configured; true otherwise
      */
     private boolean isBlockedByFirewall(final String ip) {
         if (isFirewallConfigured()) {
@@ -3417,10 +3365,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * Resolves a proposed node identifier to a node identifier that the manager
-     * approves. If the proposed node identifier conflicts with an existing node
-     * identifier, then an approved node identifier is generated and returned to
-     * the caller.
+     * Resolves a proposed node identifier to a node identifier that the manager approves. If the proposed node identifier conflicts with an existing node identifier, then an approved node identifier
+     * is generated and returned to the caller.
      *
      * @param proposedNodeId a proposed identifier
      *
@@ -3579,11 +3525,8 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * This timer task simply processes any pending heartbeats. This timer task
-     * is not strictly needed, as HeartbeatMonitoringTimerTask will do this.
-     * However, this task is scheduled much more frequently and by processing
-     * the heartbeats more frequently, the stats that we report have less of a
-     * delay.
+     * This timer task simply processes any pending heartbeats. This timer task is not strictly needed, as HeartbeatMonitoringTimerTask will do this. However, this task is scheduled much more
+     * frequently and by processing the heartbeats more frequently, the stats that we report have less of a delay.
      */
     private class ProcessPendingHeartbeatsTask extends TimerTask {
 
@@ -3599,13 +3542,9 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
     }
 
     /**
-     * A timer task to detect nodes that have not sent a heartbeat in a while.
-     * The "problem" nodes are marked as disconnected due to lack of heartbeat
-     * by the task. No disconnection request is sent to the node. This is
-     * because either the node is not functioning in which case sending the
-     * request is futile or the node is running a bit slow. In the latter case,
-     * we'll wait for the next heartbeat and send a reconnection request when we
-     * process the heartbeat in the heartbeatHandler() method.
+     * A timer task to detect nodes that have not sent a heartbeat in a while. The "problem" nodes are marked as disconnected due to lack of heartbeat by the task. No disconnection request is sent to
+     * the node. This is because either the node is not functioning, in which case sending the request is futile, or the node is running a bit slow. In the latter case, we'll wait for the next heartbeat
+     * and send a reconnection request when we process the heartbeat in the heartbeatHandler() method.
      */
     private class HeartbeatMonitoringTimerTask extends TimerTask {
 
@@ -3899,7 +3838,7 @@ public class WebClusterManager implements HttpClusterManager, ProtocolHandler, C
         for (final Map.Entry<Date, List<StatusSnapshot>> entry : snapshotsToAggregate.entrySet()) {
             final List<StatusSnapshot> snapshots = entry.getValue();
             final StatusSnapshot reducedSnapshot = snapshots.get(0).getValueReducer().reduce(snapshots);
-            
+
             final StatusSnapshotDTO dto = new StatusSnapshotDTO();
             dto.setTimestamp(reducedSnapshot.getTimestamp());
             dto.setStatusMetrics(StatusHistoryUtil.createStatusSnapshotDto(reducedSnapshot).getStatusMetrics());

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/node/Node.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/node/Node.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/node/Node.java
index 1b128f7..bc05b89 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/node/Node.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/node/Node.java
@@ -27,13 +27,11 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Represents a connected flow controller. Nodes always have an immutable
- * identifier and a status. The status may be changed, but never null.
+ * Represents a connected flow controller. Nodes always have an immutable identifier and a status. The status may be changed, but never null.
  *
  * A Node may be cloned, but the cloning is a shallow copy of the instance.
  *
- * This class overrides hashCode and equals and considers two instances to be
- * equal if they have the equal NodeIdentifiers.
+ * This class overrides hashCode and equals and considers two instances to be equal if they have the equal NodeIdentifiers.
  *
  * @author unattributed
  */
@@ -44,19 +42,12 @@ public class Node implements Cloneable, Comparable<Node> {
     /**
      * The semantics of a Node status are as follows:
      * <ul>
-     * <li>CONNECTED -- a flow controller that is connected to the cluster. A
-     * connecting node transitions to connected after the cluster receives the
-     * flow controller's first heartbeat. A connected node can transition to
-     * disconnecting.</li>
-     * <li>CONNECTING -- a flow controller has issued a connection request to
-     * the cluster, but has not yet sent a heartbeat. A connecting node can
-     * transition to disconnecting or connected. The cluster will not accept any
-     * external requests to change the flow while any node is connecting.</li>
-     * <li>DISCONNECTED -- a flow controller that is not connected to the
-     * cluster. A disconnected node can transition to connecting.</li>
-     * <li>DISCONNECTING -- a flow controller that is in the process of
-     * disconnecting from the cluster. A disconnecting node will always
-     * transition to disconnected.</li>
+     * <li>CONNECTED -- a flow controller that is connected to the cluster. A connecting node transitions to connected after the cluster receives the flow controller's first heartbeat. A connected
+     * node can transition to disconnecting.</li>
+     * <li>CONNECTING -- a flow controller has issued a connection request to the cluster, but has not yet sent a heartbeat. A connecting node can transition to disconnecting or connected. The cluster
+     * will not accept any external requests to change the flow while any node is connecting.</li>
+     * <li>DISCONNECTED -- a flow controller that is not connected to the cluster. A disconnected node can transition to connecting.</li>
+     * <li>DISCONNECTING -- a flow controller that is in the process of disconnecting from the cluster. A disconnecting node will always transition to disconnected.</li>
      * </ul>
      */
     public static enum Status {
@@ -93,8 +84,7 @@ public class Node implements Cloneable, Comparable<Node> {
     private AtomicLong connectionRequestedTimestamp = new AtomicLong(0L);
 
     /**
-     * a flag to indicate this node was disconnected because of a lack of
-     * heartbeat
+     * a flag to indicate this node was disconnected because of a lack of heartbeat
      */
     private boolean heartbeatDisconnection;
 
@@ -156,8 +146,7 @@ public class Node implements Cloneable, Comparable<Node> {
     }
 
     /**
-     * Sets the time when the connection request for this node was last
-     * received.
+     * Sets the time when the connection request for this node was last received.
      *
      * This method is thread-safe and may be called without obtaining any lock.
      *
@@ -168,19 +157,16 @@ public class Node implements Cloneable, Comparable<Node> {
     }
 
     /**
-     * Returns true if the node was disconnected due to lack of heartbeat; false
-     * otherwise.
+     * Returns true if the node was disconnected due to lack of heartbeat; false otherwise.
      *
-     * @return true if the node was disconnected due to lack of heartbeat; false
-     * otherwise.
+     * @return true if the node was disconnected due to lack of heartbeat; false otherwise.
      */
     public boolean isHeartbeatDisconnection() {
         return heartbeatDisconnection;
     }
 
     /**
-     * Sets the status to disconnected and flags the node as being disconnected
-     * by lack of heartbeat.
+     * Sets the status to disconnected and flags the node as being disconnected by lack of heartbeat.
      */
     public void setHeartbeatDisconnection() {
         setStatus(Status.DISCONNECTED);
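
The status semantics above amount to a small state machine: CONNECTING may move to CONNECTED or DISCONNECTING, CONNECTED may move to DISCONNECTING, DISCONNECTING always ends at DISCONNECTED, and DISCONNECTED may move back to CONNECTING. A hedged sketch of those transitions follows; this helper is illustrative and is not part of Node.java.

import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;

public final class NodeStatusTransitions {

    public enum Status { CONNECTED, CONNECTING, DISCONNECTED, DISCONNECTING }

    // Allowed transitions, per the Status javadoc above.
    private static final Map<Status, Set<Status>> ALLOWED = new EnumMap<>(Status.class);
    static {
        ALLOWED.put(Status.CONNECTING, EnumSet.of(Status.CONNECTED, Status.DISCONNECTING));
        ALLOWED.put(Status.CONNECTED, EnumSet.of(Status.DISCONNECTING));
        ALLOWED.put(Status.DISCONNECTING, EnumSet.of(Status.DISCONNECTED));
        ALLOWED.put(Status.DISCONNECTED, EnumSet.of(Status.CONNECTING));
    }

    public static boolean canTransition(final Status from, final Status to) {
        return ALLOWED.getOrDefault(from, EnumSet.noneOf(Status.class)).contains(to);
    }

    private NodeStatusTransitions() {
    }
}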

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/ClusterManagerProtocolServiceLocatorFactoryBean.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/ClusterManagerProtocolServiceLocatorFactoryBean.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/ClusterManagerProtocolServiceLocatorFactoryBean.java
index c369a7f..2136dad 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/ClusterManagerProtocolServiceLocatorFactoryBean.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/ClusterManagerProtocolServiceLocatorFactoryBean.java
@@ -33,12 +33,10 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
 
 /**
- * Factory bean for creating a singleton ClusterManagerProtocolServiceLocator
- * instance. If the application is configured to act as the cluster manager,
- * then null is always returned as the created instance.
+ * Factory bean for creating a singleton ClusterManagerProtocolServiceLocator instance. If the application is configured to act as the cluster manager, then null is always returned as the created
+ * instance.
  *
- * The cluster manager protocol service represents the socket endpoint for
- * sending internal socket messages to the cluster manager.
+ * The cluster manager protocol service represents the socket endpoint for sending internal socket messages to the cluster manager.
  */
 public class ClusterManagerProtocolServiceLocatorFactoryBean implements FactoryBean, ApplicationContextAware, DisposableBean {
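
The factory-bean pattern described above (return null when the application itself is configured as the cluster manager, otherwise hand back a singleton locator) can be sketched as follows; the class name and the Object stand-in for the real locator are hypothetical.

import org.springframework.beans.factory.FactoryBean;

// Illustrative only: yields null when the app is the cluster manager, mirroring the javadoc above.
public class ServiceLocatorFactoryBeanSketch implements FactoryBean<Object> {

    private final boolean configuredAsClusterManager;
    private volatile Object locator;

    public ServiceLocatorFactoryBeanSketch(final boolean configuredAsClusterManager) {
        this.configuredAsClusterManager = configuredAsClusterManager;
    }

    @Override
    public Object getObject() {
        if (configuredAsClusterManager) {
            return null;  // the manager has no need for a locator pointing at itself
        }
        if (locator == null) {
            locator = new Object();  // stand-in for the real service locator instance
        }
        return locator;
    }

    @Override
    public Class<?> getObjectType() {
        return Object.class;
    }

    @Override
    public boolean isSingleton() {
        return true;
    }
}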
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/WebClusterManagerFactoryBean.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/WebClusterManagerFactoryBean.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/WebClusterManagerFactoryBean.java
index 7bcb203..2b3bff9 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/WebClusterManagerFactoryBean.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/spring/WebClusterManagerFactoryBean.java
@@ -36,9 +36,7 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
 
 /**
- * Factory bean for creating a singleton WebClusterManager instance. If the
- * application is not configured to act as the cluster manager, then null is
- * always returned as the created instance.
+ * Factory bean for creating a singleton WebClusterManager instance. If the application is not configured to act as the cluster manager, then null is always returned as the created instance.
  */
 public class WebClusterManagerFactoryBean implements FactoryBean, ApplicationContextAware {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/event/impl/EventManagerImplTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/event/impl/EventManagerImplTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/event/impl/EventManagerImplTest.java
index 09ea44b..99c0a5a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/event/impl/EventManagerImplTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/event/impl/EventManagerImplTest.java
@@ -16,15 +16,16 @@
  */
 package org.apache.nifi.cluster.event.impl;
 
-import org.apache.nifi.cluster.event.impl.EventManagerImpl;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import org.apache.nifi.cluster.event.Event;
 import org.apache.nifi.cluster.event.Event.Category;
 import org.apache.nifi.cluster.event.EventManager;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 /**
  * @author unattributed

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewallTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewallTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewallTest.java
index e5db7ca..441a3b2 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewallTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewallTest.java
@@ -20,8 +20,10 @@ import java.io.File;
 import java.io.IOException;
 import org.apache.nifi.util.file.FileUtils;
 import org.junit.After;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 import org.junit.Before;
-import static org.junit.Assert.*;
 import org.junit.Test;
 
 public class FileBasedClusterNodeFirewallTest {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImplTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImplTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImplTest.java
index 0c65aba..a7e877e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImplTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpRequestReplicatorImplTest.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.cluster.manager.impl;
 
-import org.apache.nifi.cluster.manager.impl.HttpRequestReplicatorImpl;
 import javax.ws.rs.core.Response;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.ws.rs.HttpMethod;
@@ -43,7 +42,9 @@ import org.junit.Before;
 import org.junit.Test;
 import org.apache.nifi.cluster.manager.testutils.HttpResponseAction;
 import org.apache.nifi.cluster.protocol.NodeIdentifier;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * @author unattributed

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImplTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImplTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImplTest.java
index d45a4d1..048ef2f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImplTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/impl/HttpResponseMapperImplTest.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.cluster.manager.impl;
 
-import org.apache.nifi.cluster.manager.impl.HttpResponseMapperImpl;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.core.util.MultivaluedMapImpl;
 import java.io.ByteArrayInputStream;
@@ -29,10 +28,11 @@ import org.apache.nifi.cluster.manager.NodeResponse;
 import org.apache.nifi.cluster.node.Node;
 import org.apache.nifi.cluster.node.Node.Status;
 import org.apache.nifi.cluster.protocol.NodeIdentifier;
+import static org.junit.Assert.assertTrue;
 import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * @author unattributed

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpRequest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpRequest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpRequest.java
index 35380dd..544cd58 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpRequest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpRequest.java
@@ -30,8 +30,7 @@ import javax.ws.rs.core.MediaType;
 import org.apache.commons.lang3.StringUtils;
 
 /**
- * Encapsulates an HTTP request. The toString method returns the
- * specification-compliant request.
+ * Encapsulates an HTTP request. The toString method returns the specification-compliant request.
  *
  * @author unattributed
  */
@@ -97,9 +96,7 @@ public class HttpRequest {
     }
 
     /**
-     * A builder for constructing basic HTTP requests. It handles only enough of
-     * the HTTP specification to support basic unit testing, and it should not
-     * be used otherwise.
+     * A builder for constructing basic HTTP requests. It handles only enough of the HTTP specification to support basic unit testing, and it should not be used otherwise.
      */
     public static class HttpRequestBuilder {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponse.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponse.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponse.java
index 3aa2931..e8fd620 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponse.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponse.java
@@ -22,8 +22,7 @@ import java.util.Map;
 import javax.ws.rs.core.Response.Status;
 
 /**
- * Encapsulates an HTTP response. The toString method returns the
- * specification-compliant response.
+ * Encapsulates an HTTP response. The toString method returns the specification-compliant response.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponseAction.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponseAction.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponseAction.java
index 28615d0..d4f9f96 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponseAction.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpResponseAction.java
@@ -17,9 +17,7 @@
 package org.apache.nifi.cluster.manager.testutils;
 
 /**
- * Wraps a HttpResponse with a time-delay. When the action is applied, the
- * currently executing thread sleeps for the given delay before returning the
- * response to the caller.
+ * Wraps a HttpResponse with a time-delay. When the action is applied, the currently executing thread sleeps for the given delay before returning the response to the caller.
  *
  * This class is good for simulating network latency.
  *
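
The delay-then-respond behavior above is a simple way to simulate network latency in tests. A minimal sketch of the idea, using hypothetical names rather than the actual testutils class:

public class DelayedResponseSketch<T> {

    private final T response;
    private final long delayMillis;

    public DelayedResponseSketch(final T response, final long delayMillis) {
        this.response = response;
        this.delayMillis = delayMillis;
    }

    // Sleeps on the calling thread for the configured delay, then returns the wrapped response.
    public T apply() throws InterruptedException {
        Thread.sleep(delayMillis);
        return response;
    }
}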

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpServer.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpServer.java
index f17a66c..bab3ca0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpServer.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/manager/testutils/HttpServer.java
@@ -37,8 +37,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * A simple HTTP web server that allows clients to register canned-responses to
- * respond to received requests.
+ * A simple HTTP web server that allows clients to register canned-responses to respond to received requests.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterManagerProtocolSenderImplTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterManagerProtocolSenderImplTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterManagerProtocolSenderImplTest.java
index 96943c2..1a3fdb6 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterManagerProtocolSenderImplTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterManagerProtocolSenderImplTest.java
@@ -31,11 +31,13 @@ import org.apache.nifi.cluster.protocol.message.ProtocolMessage;
 import org.apache.nifi.io.socket.ServerSocketConfiguration;
 import org.apache.nifi.io.socket.SocketConfiguration;
 import org.junit.After;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
 import org.junit.Before;
 import org.junit.Test;
 import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServiceLocatorTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServiceLocatorTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServiceLocatorTest.java
index 4a69571..ea40150 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServiceLocatorTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServiceLocatorTest.java
@@ -20,10 +20,12 @@ import java.net.InetSocketAddress;
 import java.util.concurrent.TimeUnit;
 import org.apache.nifi.io.socket.multicast.DiscoverableService;
 import org.apache.nifi.io.socket.multicast.DiscoverableServiceImpl;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 import org.junit.Before;
 import org.junit.Test;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 import org.mockito.stubbing.OngoingStubbing;
 
 public class ClusterServiceLocatorTest {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServicesBroadcasterTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServicesBroadcasterTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServicesBroadcasterTest.java
index 4d85d1a..0f834fc 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServicesBroadcasterTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/ClusterServicesBroadcasterTest.java
@@ -28,7 +28,8 @@ import org.apache.nifi.io.socket.multicast.DiscoverableService;
 import org.apache.nifi.io.socket.multicast.DiscoverableServiceImpl;
 import org.apache.nifi.io.socket.multicast.MulticastConfiguration;
 import org.junit.After;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/MulticastProtocolListenerTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/MulticastProtocolListenerTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/MulticastProtocolListenerTest.java
index 6c79b90..f5037a8 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/MulticastProtocolListenerTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/MulticastProtocolListenerTest.java
@@ -35,7 +35,7 @@ import org.apache.nifi.cluster.protocol.message.ProtocolMessage;
 import org.apache.nifi.io.socket.multicast.MulticastConfiguration;
 import org.apache.nifi.io.socket.multicast.MulticastUtils;
 import org.junit.After;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/NodeProtocolSenderImplTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/NodeProtocolSenderImplTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/NodeProtocolSenderImplTest.java
index 7c62d2f..a759b86 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/NodeProtocolSenderImplTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/NodeProtocolSenderImplTest.java
@@ -111,7 +111,8 @@ public class NodeProtocolSenderImplTest {
         when(mockServiceLocator.getService()).thenReturn(service);
         when(mockHandler.canHandle(any(ProtocolMessage.class))).thenReturn(Boolean.TRUE);
         ConnectionResponseMessage mockMessage = new ConnectionResponseMessage();
-        mockMessage.setConnectionResponse(new ConnectionResponse(nodeIdentifier, new StandardDataFlow("flow".getBytes("UTF-8"), new byte[0], new byte[0]), false, null, null, UUID.randomUUID().toString()));
+        mockMessage.setConnectionResponse(new ConnectionResponse(nodeIdentifier,
+                new StandardDataFlow("flow".getBytes("UTF-8"), new byte[0], new byte[0]), false, null, null, UUID.randomUUID().toString()));
         when(mockHandler.handle(any(ProtocolMessage.class))).thenReturn(mockMessage);
 
         ConnectionRequestMessage request = new ConnectionRequestMessage();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/SocketProtocolListenerTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/SocketProtocolListenerTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/SocketProtocolListenerTest.java
index 92a7d2a..7a91c29 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/SocketProtocolListenerTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/test/java/org/apache/nifi/cluster/protocol/impl/SocketProtocolListenerTest.java
@@ -33,7 +33,8 @@ import org.apache.nifi.io.socket.ServerSocketConfiguration;
 import org.apache.nifi.io.socket.SocketConfiguration;
 import org.apache.nifi.io.socket.SocketUtils;
 import org.junit.After;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 import org.junit.Before;
 import org.junit.Test;
 


[25/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/21209b23
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/21209b23
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/21209b23

Branch: refs/heads/NIFI-292
Commit: 21209b2341533bfd16e751a98d0dc65da8eee45b
Parents: 1086094
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 14:26:14 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 14:26:14 2015 -0400

----------------------------------------------------------------------
 .../org/apache/nifi/cluster/event/Event.java    |   6 +-
 .../apache/nifi/cluster/event/EventManager.java |   8 +-
 .../cluster/event/impl/EventManagerImpl.java    |   6 +-
 .../cluster/firewall/ClusterNodeFirewall.java   |   6 +-
 .../impl/FileBasedClusterNodeFirewall.java      |  16 +-
 .../apache/nifi/cluster/flow/DataFlowDao.java   |   3 +-
 .../cluster/flow/DataFlowManagementService.java |  23 +-
 .../nifi/cluster/flow/StaleFlowException.java   |   3 +-
 .../nifi/cluster/flow/impl/DataFlowDaoImpl.java |  30 +--
 .../impl/DataFlowManagementServiceImpl.java     |  19 +-
 .../nifi/cluster/manager/ClusterManager.java    | 100 +++----
 .../cluster/manager/HttpClusterManager.java     | 111 +++-----
 .../cluster/manager/HttpRequestReplicator.java  |  41 +--
 .../cluster/manager/HttpResponseMapper.java     |   3 +-
 .../nifi/cluster/manager/NodeResponse.java      |  57 ++--
 .../ConnectingNodeMutableRequestException.java  |   3 +-
 ...DisconnectedNodeMutableRequestException.java |   3 +-
 .../exception/IllegalClusterStateException.java |   3 +-
 .../exception/IllegalNodeDeletionException.java |   3 +-
 .../IllegalNodeDisconnectionException.java      |   4 +-
 .../IllegalNodeReconnectionException.java       |   3 +-
 .../IneligiblePrimaryNodeException.java         |   3 +-
 .../exception/MutableRequestException.java      |   5 +-
 .../exception/NoConnectedNodesException.java    |   3 +-
 .../exception/NoResponseFromNodesException.java |   5 +-
 .../exception/NodeDisconnectionException.java   |   3 +-
 .../PrimaryRoleAssignmentException.java         |   3 +-
 .../SafeModeMutableRequestException.java        |   3 +-
 .../manager/exception/UnknownNodeException.java |   3 +-
 .../exception/UriConstructionException.java     |   4 +-
 .../manager/impl/HttpRequestReplicatorImpl.java |  42 +--
 .../manager/impl/HttpResponseMapperImpl.java    |   9 +-
 .../cluster/manager/impl/WebClusterManager.java | 261 +++++++------------
 .../java/org/apache/nifi/cluster/node/Node.java |  40 +--
 ...anagerProtocolServiceLocatorFactoryBean.java |   8 +-
 .../spring/WebClusterManagerFactoryBean.java    |   4 +-
 .../event/impl/EventManagerImplTest.java        |   5 +-
 .../impl/FileBasedClusterNodeFirewallTest.java  |   4 +-
 .../impl/HttpRequestReplicatorImplTest.java     |   5 +-
 .../impl/HttpResponseMapperImplTest.java        |   6 +-
 .../cluster/manager/testutils/HttpRequest.java  |   7 +-
 .../cluster/manager/testutils/HttpResponse.java |   3 +-
 .../manager/testutils/HttpResponseAction.java   |   4 +-
 .../cluster/manager/testutils/HttpServer.java   |   3 +-
 .../ClusterManagerProtocolSenderImplTest.java   |   6 +-
 .../impl/ClusterServiceLocatorTest.java         |   6 +-
 .../impl/ClusterServicesBroadcasterTest.java    |   3 +-
 .../impl/MulticastProtocolListenerTest.java     |   2 +-
 .../impl/NodeProtocolSenderImplTest.java        |   3 +-
 .../impl/SocketProtocolListenerTest.java        |   3 +-
 50 files changed, 318 insertions(+), 589 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/Event.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/Event.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/Event.java
index 6bc5d6c..aae93ef 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/Event.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/Event.java
@@ -20,8 +20,7 @@ import java.util.Date;
 import org.apache.commons.lang3.StringUtils;
 
 /**
- * Events describe the occurrence of something noteworthy. They record the
- * event's source, a timestamp, a description, and a category.
+ * Events describe the occurrence of something noteworthy. They record the event's source, a timestamp, a description, and a category.
  *
  * @author unattributed
  *
@@ -45,8 +44,7 @@ public class Event {
     private final String message;
 
     /**
-     * Creates an event with the current time as the timestamp and a category of
-     * "INFO".
+     * Creates an event with the current time as the timestamp and a category of "INFO".
      *
      * @param source the source
      * @param message the description

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/EventManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/EventManager.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/EventManager.java
index f9dfb00..3c9d441 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/EventManager.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/EventManager.java
@@ -19,9 +19,8 @@ package org.apache.nifi.cluster.event;
 import java.util.List;
 
 /**
- * Manages an ordered list of events. The event history size dictates the total
- * number of events to manage for a given source at a given time. When the size
- * is exceeded, the oldest event for that source is evicted.
+ * Manages an ordered list of events. The event history size dictates the total number of events to manage for a given source at a given time. When the size is exceeded, the oldest event for that
+ * source is evicted.
  *
  * @author unattributed
  */
@@ -35,8 +34,7 @@ public interface EventManager {
     void addEvent(Event event);
 
     /**
-     * Returns a list of events for a given source sorted by the event's
-     * timestamp where the most recent event is first in the list.
+     * Returns a list of events for a given source sorted by the event's timestamp where the most recent event is first in the list.
      *
      * @param eventSource the source
      *

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/impl/EventManagerImpl.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/impl/EventManagerImpl.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/impl/EventManagerImpl.java
index 7fadc78..411d6c3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/impl/EventManagerImpl.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/event/impl/EventManagerImpl.java
@@ -36,8 +36,7 @@ import org.apache.nifi.cluster.event.EventManager;
 public class EventManagerImpl implements EventManager {
 
     /**
-     * associates the source ID with an ordered queue of events, ordered by most
-     * recent event
+     * associates the source ID with an ordered queue of events, ordered by most recent event
      */
     private final Map<String, Queue<Event>> eventsMap = new HashMap<>();
 
@@ -49,8 +48,7 @@ public class EventManagerImpl implements EventManager {
     /**
      * Creates an instance.
      *
-     * @param eventHistorySize the number of events to manage for a given
-     * source. Value must be positive.
+     * @param eventHistorySize the number of events to manage for a given source. Value must be positive.
      */
     public EventManagerImpl(final int eventHistorySize) {
         if (eventHistorySize <= 0) {
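
The eviction rule described above (drop the oldest event for a source once its history exceeds the configured size, keeping the most recent event first) can be sketched with a per-source bounded deque; the class and field names below are illustrative rather than EventManagerImpl's actual internals.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;

public class BoundedEventHistorySketch {

    private final int historySize;
    private final Map<String, Deque<String>> eventsBySource = new HashMap<>();

    public BoundedEventHistorySketch(final int historySize) {
        if (historySize <= 0) {
            throw new IllegalArgumentException("history size must be positive");
        }
        this.historySize = historySize;
    }

    public synchronized void addEvent(final String source, final String message) {
        final Deque<String> events = eventsBySource.computeIfAbsent(source, s -> new ArrayDeque<>());
        if (events.size() >= historySize) {
            events.removeLast();   // evict the oldest event for this source
        }
        events.addFirst(message);  // most recent event is first in the list
    }
}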

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/ClusterNodeFirewall.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/ClusterNodeFirewall.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/ClusterNodeFirewall.java
index 08d21a5..b6713d1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/ClusterNodeFirewall.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/ClusterNodeFirewall.java
@@ -17,14 +17,12 @@
 package org.apache.nifi.cluster.firewall;
 
 /**
- * Defines the interface for restricting external client connections to a set of
- * hosts or IPs.
+ * Defines the interface for restricting external client connections to a set of hosts or IPs.
  */
 public interface ClusterNodeFirewall {
 
     /**
-     * Returns true if the given host or IP is permissible through the firewall;
-     * false otherwise.
+     * Returns true if the given host or IP is permissible through the firewall; false otherwise.
      *
      * If an IP is given, then it must be formatted in dotted decimal notation.
      *

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewall.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewall.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewall.java
index 5219629..5859e1b 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewall.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/firewall/impl/FileBasedClusterNodeFirewall.java
@@ -32,11 +32,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * A file-based implementation of the ClusterFirewall interface. The class is
- * configured with a file. If the file is empty, then everything is permissible.
- * Otherwise, the file should contain hostnames or IPs formatted as dotted
- * decimals with an optional CIDR suffix. Each entry must be separated by a
- * newline. An example configuration is given below:
+ * A file-based implementation of the ClusterFirewall interface. The class is configured with a file. If the file is empty, then everything is permissible. Otherwise, the file should contain hostnames
+ * or IPs formatted as dotted decimals with an optional CIDR suffix. Each entry must be separated by a newline. An example configuration is given below:
  *
  * <code>
  * # hash character is a comment delimiter
@@ -46,12 +43,9 @@ import org.slf4j.LoggerFactory;
  * 9.10.11.12/13   # a smaller range of CIDR IPs
  * </code>
  *
- * This class allows for synchronization with an optionally configured restore
- * directory. If configured, then at startup, if the either the config file or
- * the restore directory's copy is missing, then the configuration file will be
- * copied to the appropriate location. If both restore directory contains a copy
- * that is different in content to configuration file, then an exception is
- * thrown at construction time.
+ * This class allows for synchronization with an optionally configured restore directory. If configured, then at startup, if the either the config file or the restore directory's copy is missing, then
+ * the configuration file will be copied to the appropriate location. If both restore directory contains a copy that is different in content to configuration file, then an exception is thrown at
+ * construction time.
  */
 public class FileBasedClusterNodeFirewall implements ClusterNodeFirewall {
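
Given the file format described above (one hostname, IP, or CIDR range per line, '#' starting a comment, and an empty file meaning everything is permissible), parsing such a file might look like the sketch below; this is only an illustration of the format, not the FileBasedClusterNodeFirewall implementation, and the file path is hypothetical.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

public class FirewallFileParsingSketch {

    // Returns the non-empty, non-comment entries of a firewall config file.
    public static List<String> readEntries(final Path configFile) throws IOException {
        final List<String> entries = new ArrayList<>();
        for (final String rawLine : Files.readAllLines(configFile)) {
            // strip trailing comments and surrounding whitespace
            final int hash = rawLine.indexOf('#');
            final String line = (hash >= 0 ? rawLine.substring(0, hash) : rawLine).trim();
            if (!line.isEmpty()) {
                entries.add(line);  // e.g. "1.2.3.4", "9.10.11.12/13", or a hostname
            }
        }
        return entries;
    }

    public static void main(String[] args) throws IOException {
        // hypothetical path; an empty result means no restrictions are applied
        System.out.println(readEntries(Paths.get("conf/cluster-firewall.txt")));
    }
}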
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowDao.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowDao.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowDao.java
index 9ee5aa8..c5134e3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowDao.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowDao.java
@@ -42,8 +42,7 @@ public interface DataFlowDao {
     void saveDataFlow(ClusterDataFlow dataFlow) throws DaoException;
 
     /**
-     * Sets the state of the dataflow. If the dataflow does not exist, then an
-     * exception is thrown.
+     * Sets the state of the dataflow. If the dataflow does not exist, then an exception is thrown.
      *
      * @param flowState the state of the dataflow
      *

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowManagementService.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowManagementService.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowManagementService.java
index f354507..8a2af54 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowManagementService.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/DataFlowManagementService.java
@@ -21,13 +21,9 @@ import java.util.Set;
 import org.apache.nifi.cluster.protocol.NodeIdentifier;
 
 /**
- * A service for managing the cluster's flow. The service will attempt to keep
- * the cluster's dataflow current while respecting the value of the configured
- * retrieval delay.
+ * A service for managing the cluster's flow. The service will attempt to keep the cluster's dataflow current while respecting the value of the configured retrieval delay.
  *
- * The eligible retrieval time is reset with the configured delay every time the
- * flow state is set to STALE. If the state is set to UNKNOWN or CURRENT, then
- * the flow will not be retrieved.
+ * The eligible retrieval time is reset with the configured delay every time the flow state is set to STALE. If the state is set to UNKNOWN or CURRENT, then the flow will not be retrieved.
  *
  * Clients must call start() and stop() to initialize and stop the instance.
  *
@@ -35,8 +31,7 @@ import org.apache.nifi.cluster.protocol.NodeIdentifier;
 public interface DataFlowManagementService {
 
     /**
-     * Starts the instance. Start may only be called if the instance is not
-     * running.
+     * Starts the instance. Start may only be called if the instance is not running.
      */
     void start();
 
@@ -67,8 +62,7 @@ public interface DataFlowManagementService {
     void updatePrimaryNode(NodeIdentifier nodeId) throws DaoException;
 
     /**
-     * Updates the dataflow with the given serialized form of the Controller
-     * Services that are to exist on the NCM.
+     * Updates the dataflow with the given serialized form of the Controller Services that are to exist on the NCM.
      *
      * @param serializedControllerServices services
      * @throws DaoException ex
@@ -76,8 +70,7 @@ public interface DataFlowManagementService {
     void updateControllerServices(byte[] serializedControllerServices) throws DaoException;
 
     /**
-     * Updates the dataflow with the given serialized form of Reporting Tasks
-     * that are to exist on the NCM.
+     * Updates the dataflow with the given serialized form of Reporting Tasks that are to exist on the NCM.
      *
      * @param serializedReportingTasks tasks
      * @throws DaoException ex
@@ -111,11 +104,9 @@ public interface DataFlowManagementService {
     void setNodeIds(Set<NodeIdentifier> nodeIds);
 
     /**
-     * Returns the set of node identifiers the service is using to retrieve the
-     * flow.
+     * Returns the set of node identifiers the service is using to retrieve the flow.
      *
-     * @return the set of node identifiers the service is using to retrieve the
-     * flow.
+     * @return the set of node identifiers the service is using to retrieve the flow.
      */
     Set<NodeIdentifier> getNodeIds();
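
The retrieval-delay behavior described above (the eligible retrieval time is reset with the configured delay whenever the flow state becomes STALE, while UNKNOWN and CURRENT suppress retrieval) can be sketched as follows; the enum and field names are illustrative.

public class RetrievalDelaySketch {

    enum PersistedFlowState { CURRENT, STALE, UNKNOWN }

    private final long retrievalDelayMillis;
    private volatile PersistedFlowState state = PersistedFlowState.UNKNOWN;
    private volatile long eligibleRetrievalTime = Long.MAX_VALUE;

    public RetrievalDelaySketch(final long retrievalDelayMillis) {
        this.retrievalDelayMillis = retrievalDelayMillis;
    }

    public void setPersistedFlowState(final PersistedFlowState newState) {
        state = newState;
        if (newState == PersistedFlowState.STALE) {
            // becoming stale (re)starts the clock before the next retrieval is allowed
            eligibleRetrievalTime = System.currentTimeMillis() + retrievalDelayMillis;
        }
    }

    public boolean shouldRetrieveFlow() {
        // only a stale flow is retrieved, and only after the configured delay has elapsed
        return state == PersistedFlowState.STALE
                && System.currentTimeMillis() >= eligibleRetrievalTime;
    }
}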
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/StaleFlowException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/StaleFlowException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/StaleFlowException.java
index ce5a08b..169712a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/StaleFlowException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/StaleFlowException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.flow;
 
 /**
- * Represents the exceptional case when a caller is requesting the current flow,
- * but a current flow is not available.
+ * Represents the exceptional case when a caller is requesting the current flow, but a current flow is not available.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowDaoImpl.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowDaoImpl.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowDaoImpl.java
index e2690f7..e7aafb7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowDaoImpl.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowDaoImpl.java
@@ -67,34 +67,22 @@ import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
 /**
- * Implements the FlowDao interface. The implementation tracks the state of the
- * dataflow by annotating the filename of the flow state file. Specifically, the
- * implementation correlates PersistedFlowState states to filename extensions.
- * The correlation is as follows:
+ * Implements the FlowDao interface. The implementation tracks the state of the dataflow by annotating the filename of the flow state file. Specifically, the implementation correlates
+ * PersistedFlowState states to filename extensions. The correlation is as follows:
  * <ul>
  * <li> CURRENT maps to flow.xml </li>
  * <li> STALE maps to flow.xml.stale </li>
  * <li> UNKNOWN maps to flow.xml.unknown </li>
  * </ul>
- * Whenever the flow state changes, the flow state file's name is updated to
- * denote its state.
+ * Whenever the flow state changes, the flow state file's name is updated to denote its state.
  *
- * The implementation also provides for a restore directory that may be
- * configured for higher availability. At instance creation, if the primary or
- * restore directories have multiple flow state files, an exception is thrown.
- * If the primary directory has a current flow state file, but the restore
- * directory does not, then the primary flow state file is copied to the restore
- * directory. If the restore directory has a current flow state file, but the
- * primary directory does not, then the restore flow state file is copied to the
- * primary directory. If both the primary and restore directories have a current
- * flow state file and the files are different, then an exception is thrown.
+ * The implementation also provides for a restore directory that may be configured for higher availability. At instance creation, if the primary or restore directories have multiple flow state files,
+ * an exception is thrown. If the primary directory has a current flow state file, but the restore directory does not, then the primary flow state file is copied to the restore directory. If the
+ * restore directory has a current flow state file, but the primary directory does not, then the restore flow state file is copied to the primary directory. If both the primary and restore directories
+ * have a current flow state file and the files are different, then an exception is thrown.
  *
- * When the flow state file is saved, it is always saved first to the restore
- * directory followed by a save to the primary directory. When the flow state
- * file is loaded, a check is made to verify that the primary and restore flow
- * state files are both current. If either is not current, then an exception is
- * thrown. The primary flow state file is always read when the load method is
- * called.
+ * When the flow state file is saved, it is always saved first to the restore directory followed by a save to the primary directory. When the flow state file is loaded, a check is made to verify that
+ * the primary and restore flow state files are both current. If either is not current, then an exception is thrown. The primary flow state file is always read when the load method is called.
  *
  * @author unattributed
  */
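
The filename correlation above (CURRENT maps to flow.xml, STALE to flow.xml.stale, UNKNOWN to flow.xml.unknown) could be expressed as a simple mapping; this helper is illustrative and is not DataFlowDaoImpl's code.

public final class FlowStateFilenameSketch {

    enum PersistedFlowState { CURRENT, STALE, UNKNOWN }

    // Maps a persisted flow state to the flow state file's name, per the javadoc above.
    public static String filenameFor(final PersistedFlowState state) {
        switch (state) {
            case CURRENT:
                return "flow.xml";
            case STALE:
                return "flow.xml.stale";
            case UNKNOWN:
                return "flow.xml.unknown";
            default:
                throw new AssertionError("unexpected state: " + state);
        }
    }

    private FlowStateFilenameSketch() {
    }
}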

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowManagementServiceImpl.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowManagementServiceImpl.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowManagementServiceImpl.java
index 4fa6504..5a7c1a9 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowManagementServiceImpl.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/flow/impl/DataFlowManagementServiceImpl.java
@@ -45,19 +45,13 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Implements FlowManagementService interface. The service tries to keep the
- * cluster's flow current with regards to the available nodes.
+ * Implements FlowManagementService interface. The service tries to keep the cluster's flow current with regards to the available nodes.
  *
- * The instance may be configured with a retrieval delay, which will reduce the
- * number of retrievals performed by the service at the expense of increasing
- * the chances that the service will not be able to provide a current flow to
- * the caller.
+ * The instance may be configured with a retrieval delay, which will reduce the number of retrievals performed by the service at the expense of increasing the chances that the service will not be able
+ * to provide a current flow to the caller.
  *
- * By default, the service will try to update the flow as quickly as possible.
- * Configuring a delay enables a less aggressive retrieval strategy.
- * Specifically, the eligible retrieval time is reset every time the flow state
- * is set to STALE. If the state is set to UNKNOWN or CURRENT, then the flow
- * will not be retrieved.
+ * By default, the service will try to update the flow as quickly as possible. Configuring a delay enables a less aggressive retrieval strategy. Specifically, the eligible retrieval time is reset
+ * every time the flow state is set to STALE. If the state is set to UNKNOWN or CURRENT, then the flow will not be retrieved.
  *
  * @author unattributed
  */
@@ -298,8 +292,7 @@ public class DataFlowManagementServiceImpl implements DataFlowManagementService
     }
 
     /**
-     * A timer task for issuing FlowRequestMessage messages to nodes to retrieve
-     * an updated flow.
+     * A timer task for issuing FlowRequestMessage messages to nodes to retrieve an updated flow.
      */
     private class FlowRetrieverTimerTask extends TimerTask {
 

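A small sketch of the retrieval-delay behaviour described in the Javadoc above; the enum and field names below are illustrative and are not the ones used by DataFlowManagementServiceImpl. The eligible retrieval time is reset whenever the state becomes STALE, and no retrieval is attempted while the state is UNKNOWN or CURRENT:

    public class RetrievalDelaySketch {
        enum FlowState { UNKNOWN, CURRENT, STALE }    // stands in for the service's flow state

        private final long retrievalDelayMillis;      // hypothetical configured delay
        private volatile long eligibleRetrievalTime = Long.MAX_VALUE;
        private volatile FlowState state = FlowState.UNKNOWN;

        public RetrievalDelaySketch(final long retrievalDelayMillis) {
            this.retrievalDelayMillis = retrievalDelayMillis;
        }

        // reset the eligible retrieval time every time the state is set to STALE
        public void setState(final FlowState newState) {
            state = newState;
            if (newState == FlowState.STALE) {
                eligibleRetrievalTime = System.currentTimeMillis() + retrievalDelayMillis;
            }
        }

        // only a STALE flow is retrieved, and only once the configured delay has elapsed
        public boolean shouldRetrieveFlow() {
            return state == FlowState.STALE && System.currentTimeMillis() >= eligibleRetrievalTime;
        }
    }
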
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/ClusterManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/ClusterManager.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/ClusterManager.java
index be52e0f..be57562 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/ClusterManager.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/ClusterManager.java
@@ -39,24 +39,15 @@ import org.apache.nifi.remote.cluster.NodeInformant;
 import org.apache.nifi.reporting.BulletinRepository;
 
 /**
- * Defines the interface for a ClusterManager. The cluster manager is a
- * threadsafe centralized manager for a cluster. Members of a cluster are nodes.
- * A member becomes a node by issuing a connection request to the manager. The
- * manager maintains the set of nodes. Nodes may be disconnected, reconnected,
- * and deleted.
+ * Defines the interface for a ClusterManager. The cluster manager is a threadsafe centralized manager for a cluster. Members of a cluster are nodes. A member becomes a node by issuing a connection
+ * request to the manager. The manager maintains the set of nodes. Nodes may be disconnected, reconnected, and deleted.
  *
- * Nodes are responsible for sending heartbeats to the manager to indicate their
- * liveliness. A manager may disconnect a node if it does not receive a
- * heartbeat within a configurable time period. A cluster manager instance may
- * be configured with how often to monitor received heartbeats
- * (getHeartbeatMonitoringIntervalSeconds()) and the maximum time that may
- * elapse between node heartbeats before disconnecting the node
- * (getMaxHeartbeatGapSeconds()).
+ * Nodes are responsible for sending heartbeats to the manager to indicate their liveliness. A manager may disconnect a node if it does not receive a heartbeat within a configurable time period. A
+ * cluster manager instance may be configured with how often to monitor received heartbeats (getHeartbeatMonitoringIntervalSeconds()) and the maximum time that may elapse between node heartbeats
+ * before disconnecting the node (getMaxHeartbeatGapSeconds()).
  *
- * Since only a single node may execute isolated processors, the cluster manager
- * maintains the notion of a primary node. The primary node is chosen at cluster
- * startup and retains the role until a user requests a different node to be the
- * primary node.
+ * Since only a single node may execute isolated processors, the cluster manager maintains the notion of a primary node. The primary node is chosen at cluster startup and retains the role until a user
+ * requests a different node to be the primary node.
  *
  * @author unattributed
  */
@@ -78,8 +69,7 @@ public interface ClusterManager extends NodeInformant {
 
     /**
      * @param nodeId node identifier
-     * @return returns the node with the given identifier or null if node does
-     * not exist
+     * @return the node with the given identifier or null if the node does not exist
      */
     Node getNode(String nodeId);
 
@@ -90,17 +80,13 @@ public interface ClusterManager extends NodeInformant {
     Set<NodeIdentifier> getNodeIds(Status... statuses);
 
     /**
-     * Deletes the node with the given node identifier. If the given node is the
-     * primary node, then a subsequent request may be made to the manager to set
-     * a new primary node.
+     * Deletes the node with the given node identifier. If the given node is the primary node, then a subsequent request may be made to the manager to set a new primary node.
      *
      * @param nodeId the node identifier
-     * @param userDn the Distinguished Name of the user requesting the node be
-     * deleted from the cluster
+     * @param userDn the Distinguished Name of the user requesting the node be deleted from the cluster
      *
      * @throws UnknownNodeException if the node does not exist
-     * @throws IllegalNodeDeletionException if the node is not in a disconnected
-     * state
+     * @throws IllegalNodeDeletionException if the node is not in a disconnected state
      */
     void deleteNode(String nodeId, String userDn) throws UnknownNodeException, IllegalNodeDeletionException;
 
@@ -114,14 +100,11 @@ public interface ClusterManager extends NodeInformant {
     ConnectionResponse requestConnection(ConnectionRequest request);
 
     /**
-     * Services reconnection requests for a given node. If the node indicates
-     * reconnection failure, then the node will be set to disconnected.
-     * Otherwise, a reconnection request will be sent to the node, initiating
-     * the connection handshake.
+     * Services reconnection requests for a given node. If the node indicates reconnection failure, then the node will be set to disconnected. Otherwise, a reconnection request will be sent to the
+     * node, initiating the connection handshake.
      *
      * @param nodeId a node identifier
-     * @param userDn the Distinguished Name of the user requesting the
-     * reconnection
+     * @param userDn the Distinguished Name of the user requesting the reconnection
      *
      * @throws UnknownNodeException if the node does not exist
      * @throws IllegalNodeReconnectionException if the node is not disconnected
@@ -132,13 +115,10 @@ public interface ClusterManager extends NodeInformant {
      * Requests the node with the given identifier be disconnected.
      *
      * @param nodeId the node identifier
-     * @param userDn the Distinguished Name of the user requesting the
-     * disconnection
+     * @param userDn the Distinguished Name of the user requesting the disconnection
      *
      * @throws UnknownNodeException if the node does not exist
-     * @throws IllegalNodeDisconnectionException if the node cannot be
-     * disconnected due to the cluster's state (e.g., node is last connected
-     * node or node is primary)
+     * @throws IllegalNodeDisconnectionException if the node cannot be disconnected due to the cluster's state (e.g., node is last connected node or node is primary)
      * @throws UnknownNodeException if the node does not exist
      * @throws IllegalNodeDisconnectionException if the node is not disconnected
      * @throws NodeDisconnectionException if the disconnection failed
@@ -146,50 +126,37 @@ public interface ClusterManager extends NodeInformant {
     void requestDisconnection(String nodeId, String userDn) throws UnknownNodeException, IllegalNodeDisconnectionException, NodeDisconnectionException;
 
     /**
-     * @return the time in seconds to wait between successive executions of
-     * heartbeat monitoring
+     * @return the time in seconds to wait between successive executions of heartbeat monitoring
      */
     int getHeartbeatMonitoringIntervalSeconds();
 
     /**
-     * @return the maximum time in seconds that is allowed between successive
-     * heartbeats of a node before disconnecting the node
+     * @return the maximum time in seconds that is allowed between successive heartbeats of a node before disconnecting the node
      */
     int getMaxHeartbeatGapSeconds();
 
     /**
-     * Returns a list of node events for the node with the given identifier. The
-     * events will be returned in order of most recent to least recent according
-     * to the creation date of the event.
+     * Returns a list of node events for the node with the given identifier. The events will be returned in order of most recent to least recent according to the creation date of the event.
      *
      * @param nodeId the node identifier
      *
-     * @return the list of events or an empty list if no node exists with the
-     * given identifier
+     * @return the list of events or an empty list if no node exists with the given identifier
      */
     List<Event> getNodeEvents(final String nodeId);
 
     /**
-     * Revokes the primary role from the current primary node and assigns the
-     * primary role to given given node ID.
+     * Revokes the primary role from the current primary node and assigns the primary role to the given node ID.
      *
-     * If role revocation fails, then the current primary node is set to
-     * disconnected while retaining the primary role and no role assignment is
-     * performed.
+     * If role revocation fails, then the current primary node is set to disconnected while retaining the primary role and no role assignment is performed.
      *
-     * If role assignment fails, then the given node is set to disconnected and
-     * is given the primary role.
+     * If role assignment fails, then the given node is set to disconnected and is given the primary role.
      *
      * @param nodeId the node identifier
-     * @param userDn the Distinguished Name of the user requesting that the
-     * Primary Node be assigned
+     * @param userDn the Distinguished Name of the user requesting that the Primary Node be assigned
      *
-     * @throws UnknownNodeException if the node with the given identifier does
-     * not exist
-     * @throws IneligiblePrimaryNodeException if the node with the given
-     * identifier is not eligible to be the primary node
-     * @throws PrimaryRoleAssignmentException if the cluster was unable to
-     * change the primary role to the requested node
+     * @throws UnknownNodeException if the node with the given identifier does not exist
+     * @throws IneligiblePrimaryNodeException if the node with the given identifier is not eligible to be the primary node
+     * @throws PrimaryRoleAssignmentException if the cluster was unable to change the primary role to the requested node
      */
     void setPrimaryNode(String nodeId, String userDn) throws UnknownNodeException, IneligiblePrimaryNodeException, PrimaryRoleAssignmentException;
 
@@ -204,20 +171,13 @@ public interface ClusterManager extends NodeInformant {
     BulletinRepository getBulletinRepository();
 
     /**
-     * Returns a {@link ProcessGroupStatus} that represents the status of all
-     * nodes with the given {@link Status}es for the given ProcessGroup id, or
-     * null if no nodes exist with the given statuses
-     *
-     * @param groupId
-     * @return
+     * @param groupId groupId
+     * @return a {@link ProcessGroupStatus} that represents the status of all nodes with the given {@link Status}es for the given ProcessGroup id, or null if no nodes exist with the given statuses
      */
     ProcessGroupStatus getProcessGroupStatus(String groupId);
 
     /**
-     * Returns a merged representation of the System Diagnostics for all nodes
-     * in the cluster
-     *
-     * @return
+     * @return a merged representation of the System Diagnostics for all nodes in the cluster
      */
     SystemDiagnostics getSystemDiagnostics();
 }

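For reference, a hedged usage sketch of the interface above; the caller, node id, and DN are hypothetical, imports from org.apache.nifi.cluster.manager and its exception package are assumed, and only methods declared by ClusterManager are used:

    // deleting a node is only valid once it is disconnected
    void removeDisconnectedNode(final ClusterManager clusterManager, final String nodeId, final String userDn)
            throws UnknownNodeException, IllegalNodeDeletionException {
        // heartbeat configuration used to decide when a node is considered dead
        final int monitorIntervalSeconds = clusterManager.getHeartbeatMonitoringIntervalSeconds();
        final int maxGapSeconds = clusterManager.getMaxHeartbeatGapSeconds();
        System.out.println("heartbeats monitored every " + monitorIntervalSeconds
                + "s; nodes disconnected after " + maxGapSeconds + "s without a heartbeat");
        clusterManager.deleteNode(nodeId, userDn);
    }
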
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpClusterManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpClusterManager.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpClusterManager.java
index 2cf5812..97ae070 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpClusterManager.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpClusterManager.java
@@ -30,73 +30,53 @@ import java.util.Map;
 import java.util.Set;
 
 /**
- * Extends the ClusterManager interface to define how requests issued to the
- * cluster manager are federated to the nodes. Specifically, the HTTP protocol
- * is used for communicating requests to the cluster manager and to the nodes.
+ * Extends the ClusterManager interface to define how requests issued to the cluster manager are federated to the nodes. Specifically, the HTTP protocol is used for communicating requests to the
+ * cluster manager and to the nodes.
  *
  * @author unattributed
  */
 public interface HttpClusterManager extends ClusterManager {
 
     /**
-     * Federates the HTTP request to all connected nodes in the cluster. The
-     * given URI's host and port will not be used and instead will be adjusted
-     * for each node's host and port. The node URIs are guaranteed to be
-     * constructed before issuing any requests, so if a UriConstructionException
-     * is thrown, then it is guaranteed that no request was issued.
+     * Federates the HTTP request to all connected nodes in the cluster. The given URI's host and port will not be used and instead will be adjusted for each node's host and port. The node URIs are
+     * guaranteed to be constructed before issuing any requests, so if a UriConstructionException is thrown, then it is guaranteed that no request was issued.
      *
      * @param method the HTTP method (e.g., GET, POST, PUT, DELETE, HEAD)
-     * @param uri the base request URI (up to, but not including, the query
-     * string)
+     * @param uri the base request URI (up to, but not including, the query string)
      * @param parameters the request parameters
      * @param headers the request headers
      *
      * @return the client response
      *
-     * @throws NoConnectedNodesException if no nodes are connected as results of
-     * the request
+     * @throws NoConnectedNodesException if no nodes are connected as a result of the request
      * @throws NoResponseFromNodesException if no response could be obtained
-     * @throws UriConstructionException if there was an issue constructing the
-     * URIs tailored for each individual node
-     * @throws ConnectingNodeMutableRequestException if the request was a PUT,
-     * POST, DELETE and a node is connecting to the cluster
-     * @throws DisconnectedNodeMutableRequestException if the request was a PUT,
-     * POST, DELETE and a node is disconnected from the cluster
-     * @throws SafeModeMutableRequestException if the request was a PUT, POST,
-     * DELETE and a the cluster is in safe mode
+     * @throws UriConstructionException if there was an issue constructing the URIs tailored for each individual node
+     * @throws ConnectingNodeMutableRequestException if the request was a PUT, POST, DELETE and a node is connecting to the cluster
+     * @throws DisconnectedNodeMutableRequestException if the request was a PUT, POST, DELETE and a node is disconnected from the cluster
+     * @throws SafeModeMutableRequestException if the request was a PUT, POST, DELETE and the cluster is in safe mode
      */
     NodeResponse applyRequest(String method, URI uri, Map<String, List<String>> parameters, Map<String, String> headers)
             throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException, ConnectingNodeMutableRequestException,
             DisconnectedNodeMutableRequestException, SafeModeMutableRequestException;
 
     /**
-     * Federates the HTTP request to the nodes specified. The given URI's host
-     * and port will not be used and instead will be adjusted for each node's
-     * host and port. The node URIs are guaranteed to be constructed before
-     * issuing any requests, so if a UriConstructionException is thrown, then it
-     * is guaranteed that no request was issued.
+     * Federates the HTTP request to the nodes specified. The given URI's host and port will not be used and instead will be adjusted for each node's host and port. The node URIs are guaranteed to be
+     * constructed before issuing any requests, so if a UriConstructionException is thrown, then it is guaranteed that no request was issued.
      *
      * @param method the HTTP method (e.g., GET, POST, PUT, DELETE, HEAD)
-     * @param uri the base request URI (up to, but not including, the query
-     * string)
+     * @param uri the base request URI (up to, but not including, the query string)
      * @param parameters the request parameters
      * @param headers the request headers
-     * @param nodeIdentifiers the NodeIdentifier for each node that the request
-     * should be replaced to
+     * @param nodeIdentifiers the NodeIdentifier for each node that the request should be replicated to
      *
      * @return the client response
      *
-     * @throws NoConnectedNodesException if no nodes are connected as results of
-     * the request
+     * @throws NoConnectedNodesException if no nodes are connected as a result of the request
      * @throws NoResponseFromNodesException if no response could be obtained
-     * @throws UriConstructionException if there was an issue constructing the
-     * URIs tailored for each individual node
-     * @throws ConnectingNodeMutableRequestException if the request was a PUT,
-     * POST, DELETE and a node is connecting to the cluster
-     * @throws DisconnectedNodeMutableRequestException if the request was a PUT,
-     * POST, DELETE and a node is disconnected from the cluster
-     * @throws SafeModeMutableRequestException if the request was a PUT, POST,
-     * DELETE and a the cluster is in safe mode
+     * @throws UriConstructionException if there was an issue constructing the URIs tailored for each individual node
+     * @throws ConnectingNodeMutableRequestException if the request was a PUT, POST, DELETE and a node is connecting to the cluster
+     * @throws DisconnectedNodeMutableRequestException if the request was a PUT, POST, DELETE and a node is disconnected from the cluster
+     * @throws SafeModeMutableRequestException if the request was a PUT, POST, DELETE and the cluster is in safe mode
      */
     NodeResponse applyRequest(String method, URI uri, Map<String, List<String>> parameters, Map<String, String> headers,
             Set<NodeIdentifier> nodeIdentifiers)
@@ -104,64 +84,45 @@ public interface HttpClusterManager extends ClusterManager {
             DisconnectedNodeMutableRequestException, SafeModeMutableRequestException;
 
     /**
-     * Federates the HTTP request to all connected nodes in the cluster. The
-     * given URI's host and port will not be used and instead will be adjusted
-     * for each node's host and port. The node URIs are guaranteed to be
-     * constructed before issuing any requests, so if a UriConstructionException
-     * is thrown, then it is guaranteed that no request was issued.
+     * Federates the HTTP request to all connected nodes in the cluster. The given URI's host and port will not be used and instead will be adjusted for each node's host and port. The node URIs are
+     * guaranteed to be constructed before issuing any requests, so if a UriConstructionException is thrown, then it is guaranteed that no request was issued.
      *
      * @param method the HTTP method (e.g., GET, POST, PUT, DELETE, HEAD)
-     * @param uri the base request URI (up to, but not including, the query
-     * string)
+     * @param uri the base request URI (up to, but not including, the query string)
      * @param entity the HTTP request entity
      * @param headers the request headers
      *
      * @return the client response
      *
-     * @throws NoConnectedNodesException if no nodes are connected as results of
-     * the request
+     * @throws NoConnectedNodesException if no nodes are connected as a result of the request
      * @throws NoResponseFromNodesException if no response could be obtained
-     * @throws UriConstructionException if there was an issue constructing the
-     * URIs tailored for each individual node
-     * @throws ConnectingNodeMutableRequestException if the request was a PUT,
-     * POST, DELETE and a node is connecting to the cluster
-     * @throws DisconnectedNodeMutableRequestException if the request was a PUT,
-     * POST, DELETE and a node is disconnected from the cluster
-     * @throws SafeModeMutableRequestException if the request was a PUT, POST,
-     * DELETE and a the cluster is in safe mode
+     * @throws UriConstructionException if there was an issue constructing the URIs tailored for each individual node
+     * @throws ConnectingNodeMutableRequestException if the request was a PUT, POST, DELETE and a node is connecting to the cluster
+     * @throws DisconnectedNodeMutableRequestException if the request was a PUT, POST, DELETE and a node is disconnected from the cluster
+     * @throws SafeModeMutableRequestException if the request was a PUT, POST, DELETE and the cluster is in safe mode
      */
     NodeResponse applyRequest(String method, URI uri, Object entity, Map<String, String> headers)
             throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException, ConnectingNodeMutableRequestException,
             DisconnectedNodeMutableRequestException, SafeModeMutableRequestException;
 
     /**
-     * Federates the HTTP request to the nodes specified. The given URI's host
-     * and port will not be used and instead will be adjusted for each node's
-     * host and port. The node URIs are guaranteed to be constructed before
-     * issuing any requests, so if a UriConstructionException is thrown, then it
-     * is guaranteed that no request was issued.
+     * Federates the HTTP request to the nodes specified. The given URI's host and port will not be used and instead will be adjusted for each node's host and port. The node URIs are guaranteed to be
+     * constructed before issuing any requests, so if a UriConstructionException is thrown, then it is guaranteed that no request was issued.
      *
      * @param method the HTTP method (e.g., GET, POST, PUT, DELETE, HEAD)
-     * @param uri the base request URI (up to, but not including, the query
-     * string)
+     * @param uri the base request URI (up to, but not including, the query string)
      * @param entity the HTTP request entity
      * @param headers the request headers
-     * @param nodeIdentifiers the NodeIdentifier for each node that the request
-     * should be replaced to
+     * @param nodeIdentifiers the NodeIdentifier for each node that the request should be replicated to
      *
      * @return the client response
      *
-     * @throws NoConnectedNodesException if no nodes are connected as results of
-     * the request
+     * @throws NoConnectedNodesException if no nodes are connected as a result of the request
      * @throws NoResponseFromNodesException if no response could be obtained
-     * @throws UriConstructionException if there was an issue constructing the
-     * URIs tailored for each individual node
-     * @throws ConnectingNodeMutableRequestException if the request was a PUT,
-     * POST, DELETE and a node is connecting to the cluster
-     * @throws DisconnectedNodeMutableRequestException if the request was a PUT,
-     * POST, DELETE and a node is disconnected from the cluster
-     * @throws SafeModeMutableRequestException if the request was a PUT, POST,
-     * DELETE and a the cluster is in safe mode
+     * @throws UriConstructionException if there was an issue constructing the URIs tailored for each individual node
+     * @throws ConnectingNodeMutableRequestException if the request was a PUT, POST, DELETE and a node is connecting to the cluster
+     * @throws DisconnectedNodeMutableRequestException if the request was a PUT, POST, DELETE and a node is disconnected from the cluster
+     * @throws SafeModeMutableRequestException if the request was a PUT, POST, DELETE and the cluster is in safe mode
      */
     NodeResponse applyRequest(String method, URI uri, Object entity, Map<String, String> headers, Set<NodeIdentifier> nodeIdentifiers)
             throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException, ConnectingNodeMutableRequestException,

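A hedged sketch of federating a read-only request through the first applyRequest overload above; the URI, header value, and method name below are invented, java.util and java.net imports are assumed, and the signature and NodeResponse accessors are the ones shown in this commit's diffs:

    NodeResponse federateStatusRequest(final HttpClusterManager httpClusterManager)
            throws NoConnectedNodesException, NoResponseFromNodesException, UriConstructionException,
            ConnectingNodeMutableRequestException, DisconnectedNodeMutableRequestException, SafeModeMutableRequestException {
        final Map<String, List<String>> parameters = Collections.emptyMap();
        final Map<String, String> headers = Collections.singletonMap("Accept", "application/json");
        // the URI's host and port are replaced per node before any request is issued
        final NodeResponse response = httpClusterManager.applyRequest(
                "GET",
                URI.create("https://cluster-manager.example:8080/nifi-api/controller/status"),
                parameters,
                headers);
        if (!response.is2xx()) {
            System.err.println("cluster returned status " + response.getStatus());
        }
        return response;
    }
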
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpRequestReplicator.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpRequestReplicator.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpRequestReplicator.java
index fb57622..2b91dbd 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpRequestReplicator.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpRequestReplicator.java
@@ -24,25 +24,21 @@ import java.util.Set;
 import org.apache.nifi.cluster.protocol.NodeIdentifier;
 
 /**
- * A service for managing the replication of requests to nodes. It is up to the
- * implementing class to decide if requests are sent concurrently or serially.
+ * A service for managing the replication of requests to nodes. It is up to the implementing class to decide if requests are sent concurrently or serially.
  *
- * Clients must call start() and stop() to initialize and shutdown the instance.
- * The instance must be started before issuing any replication requests.
+ * Clients must call start() and stop() to initialize and shutdown the instance. The instance must be started before issuing any replication requests.
  *
  * @author unattributed
  */
 public interface HttpRequestReplicator {
 
     /**
-     * Starts the instance for replicating requests. Start may only be called if
-     * the instance is not running.
+     * Starts the instance for replicating requests. Start may only be called if the instance is not running.
      */
     void start();
 
     /**
-     * Stops the instance from replicating requests. Stop may only be called if
-     * the instance is running.
+     * Stops the instance from replicating requests. Stop may only be called if the instance is running.
      */
     void stop();
 
@@ -52,47 +48,36 @@ public interface HttpRequestReplicator {
     boolean isRunning();
 
     /**
-     * Requests are sent to each node in the cluster. If the request results in
-     * an exception, then the NodeResourceResponse will contain the exception.
+     * Requests are sent to each node in the cluster. If the request results in an exception, then the NodeResourceResponse will contain the exception.
      *
-     * HTTP DELETE and OPTIONS methods must supply an empty parameters map or
-     * else and IllegalArgumentException is thrown.
+     * HTTP DELETE and OPTIONS methods must supply an empty parameters map or else an IllegalArgumentException is thrown.
      *
      * @param nodeIds the node identifiers
-     * @param method the HTTP method (e.g., GET, POST, PUT, DELETE, HEAD,
-     * OPTIONS)
-     * @param uri the base request URI (up to, but not including, the query
-     * string)
+     * @param method the HTTP method (e.g., GET, POST, PUT, DELETE, HEAD, OPTIONS)
+     * @param uri the base request URI (up to, but not including, the query string)
      * @param parameters any request parameters
      * @param headers any HTTP headers
      *
      * @return the set of node responses
      *
-     * @throws UriConstructionException if a request for a node failed to be
-     * constructed from the given prototype URI. If thrown, it is guaranteed
-     * that no request was sent.
+     * @throws UriConstructionException if a request for a node failed to be constructed from the given prototype URI. If thrown, it is guaranteed that no request was sent.
      */
     Set<NodeResponse> replicate(Set<NodeIdentifier> nodeIds, String method, URI uri, Map<String, List<String>> parameters, Map<String, String> headers) throws UriConstructionException;
 
     /**
-     * Requests are sent to each node in the cluster. If the request results in
-     * an exception, then the NodeResourceResponse will contain the exception.
+     * Requests are sent to each node in the cluster. If the request results in an exception, then the NodeResourceResponse will contain the exception.
      *
-     * HTTP DELETE, GET, HEAD, and OPTIONS methods will throw an
-     * IllegalArgumentException if used.
+     * HTTP DELETE, GET, HEAD, and OPTIONS methods will throw an IllegalArgumentException if used.
      *
      * @param nodeIds the node identifiers
      * @param method the HTTP method (e.g., POST, PUT)
-     * @param uri the base request URI (up to, but not including, the query
-     * string)
+     * @param uri the base request URI (up to, but not including, the query string)
      * @param entity an entity
      * @param headers any HTTP headers
      *
      * @return the set of node responses
      *
-     * @throws UriConstructionException if a request for a node failed to be
-     * constructed from the given prototype URI. If thrown, it is guaranteed
-     * that no request was sent.
+     * @throws UriConstructionException if a request for a node failed to be constructed from the given prototype URI. If thrown, it is guaranteed that no request was sent.
      */
     Set<NodeResponse> replicate(Set<NodeIdentifier> nodeIds, String method, URI uri, Object entity, Map<String, String> headers) throws UriConstructionException;
 

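A brief illustrative sketch of driving the replicator above; nodeIds, the URI, and the header value are hypothetical, java.util and java.net imports are assumed, and start(), stop(), and the parameter-based replicate(...) are the methods declared by this interface:

    Set<NodeResponse> replicateStatusRequest(final HttpRequestReplicator replicator, final Set<NodeIdentifier> nodeIds)
            throws UriConstructionException {
        replicator.start();                           // the instance must be running before replicating
        try {
            return replicator.replicate(
                    nodeIds,
                    "GET",
                    URI.create("https://cluster-manager.example:8080/nifi-api/controller/status"),
                    Collections.<String, List<String>>emptyMap(),   // empty map; mandatory for DELETE/OPTIONS
                    Collections.singletonMap("Accept", "application/json"));
        } finally {
            replicator.stop();                        // stop is only valid while the instance is running
        }
    }
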
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpResponseMapper.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpResponseMapper.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpResponseMapper.java
index 843a666..64c9d75 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpResponseMapper.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/HttpResponseMapper.java
@@ -29,8 +29,7 @@ import org.apache.nifi.cluster.node.Node.Status;
 public interface HttpResponseMapper {
 
     /**
-     * Maps a HTTP response to a node response and the corresponding node
-     * status.
+     * Maps a HTTP response to a node response and the corresponding node status.
      *
      * @param requestURI the original request URI
      * @param nodeResponses a set of node resource responses

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/NodeResponse.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/NodeResponse.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/NodeResponse.java
index 958d600..ae113f4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/NodeResponse.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/NodeResponse.java
@@ -40,23 +40,15 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Encapsulates a node's response in regards to receiving a external API
- * request.
+ * Encapsulates a node's response in regards to receiving an external API request.
  *
- * Both the ClientResponse and (server) Response may be obtained from this
- * instance. The ClientResponse is stored as it is received from the node. This
- * includes the entity input stream. The Response is constructed on demand when
- * mapping a ClientResponse to the Response. The ClientResponse to Response
- * mapping includes copying the ClientResponse's input stream to the Response.
- * Therefore, the getResponse() method should not be called more than once.
- * Furthermore, the method should not be called if the caller has already read
- * the ClientResponse's input stream.
+ * Both the ClientResponse and (server) Response may be obtained from this instance. The ClientResponse is stored as it is received from the node. This includes the entity input stream. The Response
+ * is constructed on demand when mapping a ClientResponse to the Response. The ClientResponse to Response mapping includes copying the ClientResponse's input stream to the Response. Therefore, the
+ * getResponse() method should not be called more than once. Furthermore, the method should not be called if the caller has already read the ClientResponse's input stream.
  *
- * If a ClientResponse was unable to be created, then a NodeResponse will store
- * the Throwable, which may be obtained by calling getThrowable().
+ * If a ClientResponse was unable to be created, then a NodeResponse will store the Throwable, which may be obtained by calling getThrowable().
  *
- * This class overrides hashCode and equals and considers two instances to be
- * equal if they have the equal NodeIdentifiers.
+ * This class overrides hashCode and equals and considers two instances to be equal if they have the equal NodeIdentifiers.
  *
  * @author unattributed
  */
@@ -145,14 +137,14 @@ public class NodeResponse {
     public int getStatus() {
         if (hasThrowable()) {
             /*
-             * since there is a throwable, there is no client input stream to 
+             * since there is a throwable, there is no client input stream to
              * worry about maintaining, so we can call getResponse() method
              */
             return getResponse().getStatus();
         } else {
             /*
              * use client response's status instead of calling getResponse().getStatus()
-             * so that we don't read the client's input stream as part of creating 
+             * so that we don't read the client's input stream as part of creating
              * the response in the getResponse() method
              */
             return clientResponse.getStatus();
@@ -160,9 +152,7 @@ public class NodeResponse {
     }
 
     /**
-     * Returns true if the response status is 2xx, false otherwise.
-     *
-     * @return
+     * @return true if the response status is 2xx, false otherwise.
      */
     public boolean is2xx() {
         final int statusCode = getStatus();
@@ -170,9 +160,7 @@ public class NodeResponse {
     }
 
     /**
-     * Returns true if the response status is 5xx, false otherwise.
-     *
-     * @return
+     * @return true if the response status is 5xx, false otherwise.
      */
     public boolean is5xx() {
         final int statusCode = getStatus();
@@ -180,8 +168,7 @@ public class NodeResponse {
     }
 
     /**
-     * Returns null if hasThrowable() is true; otherwise the client's response
-     * is returned.
+     * Returns null if hasThrowable() is true; otherwise the client's response is returned.
      *
      * The ClientResponse's input stream can only be read once.
      *
@@ -192,24 +179,18 @@ public class NodeResponse {
     }
 
     /**
-     * If this node response has been merged returns the updated entity,
-     * otherwise null. Also returns null if hasThrowable() is true. The intent
-     * of this method is to support getting the response entity when it was
-     * already consumed during the merge operation. In this case the client
-     * response rom getClientResponse() will not support a getEntity(...) or
-     * getEntityInputStream() call.
+     * If this node response has been merged, returns the updated entity; otherwise null. Also returns null if hasThrowable() is true. The intent of this method is to support getting the response
+     * entity when it was already consumed during the merge operation. In this case the client response from getClientResponse() will not support a getEntity(...) or getEntityInputStream() call.
      *
-     * @return
+     * @return the updated entity if this node response has been merged, otherwise null; also returns null if hasThrowable() is true
      */
     public Entity getUpdatedEntity() {
         return updatedEntity;
     }
 
     /**
-     * Creates a Response by mapping the ClientResponse values to it. Since the
-     * ClientResponse's input stream can only be read once, this method should
-     * only be called once. Furthermore, the caller should not have already read
-     * the ClientResponse's input stream.
+     * Creates a Response by mapping the ClientResponse values to it. Since the ClientResponse's input stream can only be read once, this method should only be called once. Furthermore, the caller
+     * should not have already read the ClientResponse's input stream.
      *
      * @return the response
      */
@@ -232,11 +213,9 @@ public class NodeResponse {
     }
 
     /**
-     * Returns true if a throwable was thrown and a response was not able to be
-     * created; false otherwise.
+     * Returns true if a throwable was thrown and a response was not able to be created; false otherwise.
      *
-     * @return true if a throwable was thrown and a response was not able to be
-     * created; false otherwise
+     * @return true if a throwable was thrown and a response was not able to be created; false otherwise
      */
     public boolean hasThrowable() {
         return getThrowable() != null;

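To make the stream-handling caveat above concrete, a hedged usage sketch; the surrounding method is invented, Response is assumed to be the JAX-RS javax.ws.rs.core.Response, and hasThrowable(), getThrowable(), is2xx(), is5xx(), getStatus(), and getResponse() are the accessors shown in this diff:

    void handle(final NodeResponse nodeResponse) {
        if (nodeResponse.hasThrowable()) {
            // no ClientResponse exists, so there is no entity stream to protect
            System.err.println("request failed: " + nodeResponse.getThrowable());
        } else if (nodeResponse.is2xx()) {
            // map the ClientResponse exactly once, and only if its input stream has not been read
            final Response mapped = nodeResponse.getResponse();
            System.out.println("mapped response status: " + mapped.getStatus());
        } else if (nodeResponse.is5xx()) {
            // the status comes from the ClientResponse without consuming its entity stream
            System.err.println("node returned status " + nodeResponse.getStatus());
        }
    }
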
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/ConnectingNodeMutableRequestException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/ConnectingNodeMutableRequestException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/ConnectingNodeMutableRequestException.java
index 365b5f0..964cdfc 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/ConnectingNodeMutableRequestException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/ConnectingNodeMutableRequestException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a HTTP request that may change a node's
- * dataflow is to be replicated while a node is connecting to the cluster.
+ * Represents the exceptional case when a HTTP request that may change a node's dataflow is to be replicated while a node is connecting to the cluster.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/DisconnectedNodeMutableRequestException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/DisconnectedNodeMutableRequestException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/DisconnectedNodeMutableRequestException.java
index 412a555..e0f3433 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/DisconnectedNodeMutableRequestException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/DisconnectedNodeMutableRequestException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a HTTP request that may change a node's
- * dataflow is to be replicated while one or more nodes are disconnected.
+ * Represents the exceptional case when a HTTP request that may change a node's dataflow is to be replicated while one or more nodes are disconnected.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalClusterStateException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalClusterStateException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalClusterStateException.java
index 6c4e670..b3d2826 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalClusterStateException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalClusterStateException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Signals that an operation to be performed on a cluster has been invoked at an
- * illegal or inappropriate time.
+ * Signals that an operation to be performed on a cluster has been invoked at an illegal or inappropriate time.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDeletionException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDeletionException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDeletionException.java
index adef62a..3e1c031 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDeletionException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDeletionException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a deletion request is issued to a node
- * that cannot be deleted (e.g., the node is not disconnected).
+ * Represents the exceptional case when a deletion request is issued to a node that cannot be deleted (e.g., the node is not disconnected).
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDisconnectionException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDisconnectionException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDisconnectionException.java
index 7e61b24..71fe044 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDisconnectionException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeDisconnectionException.java
@@ -17,9 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a disconnection request is issued to a
- * node that cannot be disconnected (e.g., last node in cluster, node is primary
- * node).
+ * Represents the exceptional case when a disconnection request is issued to a node that cannot be disconnected (e.g., last node in cluster, node is primary node).
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeReconnectionException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeReconnectionException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeReconnectionException.java
index 96c76bc..63242d0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeReconnectionException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IllegalNodeReconnectionException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a reconnection request is issued to a
- * node that cannot be reconnected (e.g., the node is not disconnected).
+ * Represents the exceptional case when a reconnection request is issued to a node that cannot be reconnected (e.g., the node is not disconnected).
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IneligiblePrimaryNodeException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IneligiblePrimaryNodeException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IneligiblePrimaryNodeException.java
index 4b0097a..a224078 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IneligiblePrimaryNodeException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/IneligiblePrimaryNodeException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when the primary role cannot be assigned to a
- * node because the node is ineligible for the role.
+ * Represents the exceptional case when the primary role cannot be assigned to a node because the node is ineligible for the role.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/MutableRequestException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/MutableRequestException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/MutableRequestException.java
index d160587..2c0ba5f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/MutableRequestException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/MutableRequestException.java
@@ -17,9 +17,8 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a HTTP request that may change a node's
- * state is to be replicated while the cluster or connected nodes are unable to
- * change their state (e.g., a new node is connecting to the cluster).
+ * Represents the exceptional case when a HTTP request that may change a node's state is to be replicated while the cluster or connected nodes are unable to change their state (e.g., a new node is
+ * connecting to the cluster).
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoConnectedNodesException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoConnectedNodesException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoConnectedNodesException.java
index 8d704b9..b350015 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoConnectedNodesException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoConnectedNodesException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when the cluster is unable to service a
- * request because no nodes are connected.
+ * Represents the exceptional case when the cluster is unable to service a request because no nodes are connected.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoResponseFromNodesException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoResponseFromNodesException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoResponseFromNodesException.java
index 9e17a23..6b03c74 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoResponseFromNodesException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NoResponseFromNodesException.java
@@ -17,9 +17,8 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when the cluster is unable to service a
- * request because no nodes returned a response. When the given request is not
- * mutable the nodes are left in their previous state.
+ * Represents the exceptional case when the cluster is unable to service a request because no nodes returned a response. When the given request is not mutable the nodes are left in their previous
+ * state.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NodeDisconnectionException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NodeDisconnectionException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NodeDisconnectionException.java
index 3bd2f4b..b2102ff 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NodeDisconnectionException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/NodeDisconnectionException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a disconnection request to a node
- * failed.
+ * Represents the exceptional case when a disconnection request to a node failed.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/PrimaryRoleAssignmentException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/PrimaryRoleAssignmentException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/PrimaryRoleAssignmentException.java
index 403f7a5..0fbaebc 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/PrimaryRoleAssignmentException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/PrimaryRoleAssignmentException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when the cluster is unable to update the
- * primary role of a node.
+ * Represents the exceptional case when the cluster is unable to update the primary role of a node.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/SafeModeMutableRequestException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/SafeModeMutableRequestException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/SafeModeMutableRequestException.java
index f544f26..03710f5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/SafeModeMutableRequestException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/SafeModeMutableRequestException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a HTTP request that may change a node's
- * dataflow is to be replicated while the cluster is in safe mode.
+ * Represents the exceptional case when an HTTP request that may change a node's dataflow is to be replicated while the cluster is in safe mode.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UnknownNodeException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UnknownNodeException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UnknownNodeException.java
index 914bb56..d2070d1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UnknownNodeException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UnknownNodeException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a request is made for a node that does
- * not exist.
+ * Represents the exceptional case when a request is made for a node that does not exist.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/21209b23/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UriConstructionException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UriConstructionException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UriConstructionException.java
index 773d7b5..27b5312 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UriConstructionException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-cluster/src/main/java/org/apache/nifi/cluster/manager/exception/UriConstructionException.java
@@ -17,9 +17,7 @@
 package org.apache.nifi.cluster.manager.exception;
 
 /**
- * Represents the exceptional case when a URI cannot be constructed from the
- * given information. This exception is similar to Java's URISyntaxException
- * except that it extends RuntimeException.
+ * Represents the exceptional case when a URI cannot be constructed from the given information. This exception is similar to Java's URISyntaxException except that it extends RuntimeException.
  *
  * @author unattributed
  */

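The NIFI-271 cleanup above collapses these cluster-manager exception javadocs onto single lines; the UriConstructionException comment also spells out the pattern all of them follow: unchecked exceptions (extending RuntimeException) so callers are not forced to declare or catch them. A minimal sketch of such a class is shown below; the constructor set is an assumption for illustration and is not taken from the commit.

package org.apache.nifi.cluster.manager.exception;

/**
 * Illustrative sketch only: an unchecked exception of the kind described above,
 * similar to Java's URISyntaxException except that it extends RuntimeException.
 * The constructors are assumed, not copied from the commit.
 */
public class UriConstructionException extends RuntimeException {

    public UriConstructionException() {
    }

    public UriConstructionException(final String message) {
        super(message);
    }

    public UriConstructionException(final String message, final Throwable cause) {
        super(message, cause);
    }

    public UriConstructionException(final Throwable cause) {
        super(cause);
    }
}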

[30/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/1eb4387d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/1eb4387d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/1eb4387d

Branch: refs/heads/NIFI-292
Commit: 1eb4387dbe8964862316d9a2ce2e999651e2124b
Parents: e811929
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 21:29:06 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 21:29:06 2015 -0400

----------------------------------------------------------------------
 .../nifi-file-authorization-provider/pom.xml    |  8 ++
 .../FileAuthorizationProvider.java              | 90 +-------------------
 .../FileAuthorizationProviderTest.java          | 63 +++++++-------
 3 files changed, 41 insertions(+), 120 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/1eb4387d/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/pom.xml
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/pom.xml b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/pom.xml
index d79b0ca..b5cde8d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/pom.xml
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/pom.xml
@@ -49,6 +49,14 @@
                     <generateDirectory>${project.build.directory}/generated-sources/jaxb</generateDirectory>
                 </configuration>
             </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-checkstyle-plugin</artifactId>
+                <configuration>
+                    <excludes>**/user/generated/*.java</excludes>
+                </configuration>
+            </plugin>            
+
         </plugins>
     </build>
     <dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/1eb4387d/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/main/java/org/apache/nifi/authorization/FileAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/main/java/org/apache/nifi/authorization/FileAuthorizationProvider.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/main/java/org/apache/nifi/authorization/FileAuthorizationProvider.java
index 5657369..9c2cad5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/main/java/org/apache/nifi/authorization/FileAuthorizationProvider.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/main/java/org/apache/nifi/authorization/FileAuthorizationProvider.java
@@ -167,23 +167,10 @@ public class FileAuthorizationProvider implements AuthorityProvider {
     public void preDestruction() {
     }
 
-    /**
-     * Determines if this provider has a default role.
-     *
-     * @return
-     */
     private boolean hasDefaultRoles() {
         return !defaultAuthorities.isEmpty();
     }
 
-    /**
-     * Determines if the specified dn is known to this authority provider. When
-     * this provider is configured to have default role(s), all dn are
-     * considered to exist.
-     *
-     * @param dn
-     * @return True if he dn is known, false otherwise
-     */
     @Override
     public boolean doesDnExist(String dn) throws AuthorityAccessException {
         if (hasDefaultRoles()) {
@@ -194,21 +181,11 @@ public class FileAuthorizationProvider implements AuthorityProvider {
         return user != null;
     }
 
-    /**
-     * Loads the authorities for the specified user. If this provider is
-     * configured for default user role(s) and a non existent dn is specified, a
-     * new user will be automatically created with the default role(s).
-     *
-     * @param dn
-     * @return
-     * @throws UnknownIdentityException
-     * @throws AuthorityAccessException
-     */
     @Override
     public synchronized Set<Authority> getAuthorities(String dn) throws UnknownIdentityException, AuthorityAccessException {
         final Set<Authority> authorities = EnumSet.noneOf(Authority.class);
 
-        // get the user 
+        // get the user
         final User user = getUser(dn);
 
         // ensure the user was located
@@ -234,16 +211,6 @@ public class FileAuthorizationProvider implements AuthorityProvider {
         return authorities;
     }
 
-    /**
-     * Adds the specified authorities to the specified user. Regardless of
-     * whether this provider is configured for a default user role, when a non
-     * existent dn is specified, an UnknownIdentityException will be thrown.
-     *
-     * @param dn
-     * @param authorities
-     * @throws UnknownIdentityException
-     * @throws AuthorityAccessException
-     */
     @Override
     public synchronized void setAuthorities(String dn, Set<Authority> authorities) throws UnknownIdentityException, AuthorityAccessException {
         // get the user
@@ -265,12 +232,6 @@ public class FileAuthorizationProvider implements AuthorityProvider {
         }
     }
 
-    /**
-     * Adds the specified authorities to the specified user.
-     *
-     * @param user
-     * @param authorities
-     */
     private void setUserAuthorities(final User user, final Set<Authority> authorities) {
         // clear the existing rules
         user.getRole().clear();
@@ -286,15 +247,6 @@ public class FileAuthorizationProvider implements AuthorityProvider {
         }
     }
 
-    /**
-     * Adds the specified user. If this provider is configured with default
-     * role(s) they will be added to the new user.
-     *
-     * @param dn
-     * @param group
-     * @throws UnknownIdentityException
-     * @throws AuthorityAccessException
-     */
     @Override
     public synchronized void addUser(String dn, String group) throws IdentityAlreadyExistsException, AuthorityAccessException {
         final User user = getUser(dn);
@@ -334,13 +286,6 @@ public class FileAuthorizationProvider implements AuthorityProvider {
         }
     }
 
-    /**
-     * Gets the users for the specified authority.
-     *
-     * @param authority
-     * @return
-     * @throws AuthorityAccessException
-     */
     @Override
     public synchronized Set<String> getUsers(Authority authority) throws AuthorityAccessException {
         final Set<String> userSet = new HashSet<>();
@@ -354,15 +299,6 @@ public class FileAuthorizationProvider implements AuthorityProvider {
         return userSet;
     }
 
-    /**
-     * Removes the specified user. Regardless of whether this provider is
-     * configured for a default user role, when a non existent dn is specified,
-     * an UnknownIdentityException will be thrown.
-     *
-     * @param dn
-     * @throws UnknownIdentityException
-     * @throws AuthorityAccessException
-     */
     @Override
     public synchronized void revokeUser(String dn) throws UnknownIdentityException, AuthorityAccessException {
         // get the user
@@ -496,24 +432,12 @@ public class FileAuthorizationProvider implements AuthorityProvider {
 
     /**
      * Grants access to download content regardless of FlowFile attributes.
-     * 
-     * @param dnChain
-     * @param attributes
-     * @return
-     * @throws UnknownIdentityException
-     * @throws AuthorityAccessException 
      */
     @Override
     public DownloadAuthorization authorizeDownload(List<String> dnChain, Map<String, String> attributes) throws UnknownIdentityException, AuthorityAccessException {
         return DownloadAuthorization.approved();
     }
 
-    /**
-     * Locates the user with the specified DN.
-     *
-     * @param dn
-     * @return
-     */
     private User getUser(String dn) throws UnknownIdentityException {
         // ensure the DN was specified
         if (dn == null) {
@@ -532,13 +456,6 @@ public class FileAuthorizationProvider implements AuthorityProvider {
         return desiredUser;
     }
 
-    /**
-     * Locates all users that are part of the specified group.
-     *
-     * @param group
-     * @return
-     * @throws UnknownIdentityException
-     */
     private Collection<User> getUserGroup(String group) throws UnknownIdentityException {
         // ensure the DN was specified
         if (group == null) {
@@ -559,11 +476,6 @@ public class FileAuthorizationProvider implements AuthorityProvider {
         return userGroup;
     }
 
-    /**
-     * Saves the users file.
-     *
-     * @throws Exception
-     */
     private void save() throws Exception {
         final Marshaller marshaller = JAXB_CONTEXT.createMarshaller();
         marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/1eb4387d/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/test/java/org/apache/nifi/authorization/FileAuthorizationProviderTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/test/java/org/apache/nifi/authorization/FileAuthorizationProviderTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/test/java/org/apache/nifi/authorization/FileAuthorizationProviderTest.java
index d02d4d7..7428500 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/test/java/org/apache/nifi/authorization/FileAuthorizationProviderTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-file-authorization-provider/src/test/java/org/apache/nifi/authorization/FileAuthorizationProviderTest.java
@@ -23,105 +23,106 @@ import org.apache.nifi.authorization.exception.ProviderCreationException;
 import org.apache.nifi.util.file.FileUtils;
 import org.apache.nifi.util.NiFiProperties;
 import org.junit.After;
+import static org.junit.Assert.assertEquals;
 import org.junit.Before;
 import org.junit.Test;
-import static org.mockito.Mockito.*;
-import static org.junit.Assert.*;
 import org.junit.Ignore;
 import org.mockito.Mockito;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 @Ignore
 public class FileAuthorizationProviderTest {
-    
+
     private FileAuthorizationProvider provider;
-    
+
     private File primary;
-    
+
     private File restore;
-    
+
     private NiFiProperties mockProperties;
-    
+
     private AuthorityProviderConfigurationContext mockConfigurationContext;
-    
+
     @Before
     public void setup() throws IOException {
-        
+
         primary = new File("target/primary/users.txt");
         restore = new File("target/restore/users.txt");
-        
+
         System.out.println("absolute path: " + primary.getAbsolutePath());
-        
+
         mockProperties = mock(NiFiProperties.class);
         when(mockProperties.getRestoreDirectory()).thenReturn(restore.getParentFile());
-        
+
         mockConfigurationContext = mock(AuthorityProviderConfigurationContext.class);
         when(mockConfigurationContext.getProperty(Mockito.eq("Authorized Users File"))).thenReturn(primary.getPath());
-        
+
         provider = new FileAuthorizationProvider();
         provider.setNiFiProperties(mockProperties);
         provider.initialize(null);
-    }     
-    
+    }
+
     @After
     public void cleanup() throws Exception {
         deleteFile(primary);
         deleteFile(restore);
     }
-    
+
     private boolean deleteFile(final File file) {
-        if(file.isDirectory()) {
+        if (file.isDirectory()) {
             FileUtils.deleteFilesInDir(file, null, null, true, true);
         }
         return FileUtils.deleteFile(file, null, 10);
     }
-    
+
     @Test
     public void testPostContructionWhenRestoreDoesNotExist() throws Exception {
-        
+
         byte[] primaryBytes = "<users/>".getBytes();
         FileOutputStream fos = new FileOutputStream(primary);
         fos.write(primaryBytes);
         fos.close();
-        
+
         provider.onConfigured(mockConfigurationContext);
         assertEquals(primary.length(), restore.length());
     }
-    
+
     @Test
     public void testPostContructionWhenPrimaryDoesNotExist() throws Exception {
-        
+
         byte[] restoreBytes = "<users/>".getBytes();
         FileOutputStream fos = new FileOutputStream(restore);
         fos.write(restoreBytes);
         fos.close();
-        
+
         provider.onConfigured(mockConfigurationContext);
         assertEquals(restore.length(), primary.length());
-        
+
     }
-    
+
     @Test(expected = ProviderCreationException.class)
     public void testPostContructionWhenPrimaryDifferentThanRestore() throws Exception {
-        
+
         byte[] primaryBytes = "<users></users>".getBytes();
         FileOutputStream fos = new FileOutputStream(primary);
         fos.write(primaryBytes);
         fos.close();
-        
+
         byte[] restoreBytes = "<users/>".getBytes();
         fos = new FileOutputStream(restore);
         fos.write(restoreBytes);
         fos.close();
-        
+
         provider.onConfigured(mockConfigurationContext);
     }
-    
+
     @Test
     public void testPostContructionWhenPrimaryAndBackupDoNotExist() throws Exception {
-        
+
         provider.onConfigured(mockConfigurationContext);
         assertEquals(0, restore.length());
         assertEquals(restore.length(), primary.length());
     }
-    
+
 }


[45/50] [abbrv] incubator-nifi git commit: NIFI-292: - Annotating endpoints using swagger. - Started building the template for the REST documentation.

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
index 9e34201..821ca2f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
@@ -17,6 +17,12 @@
 package org.apache.nifi.web.api;
 
 import com.sun.jersey.api.core.ResourceContext;
+import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiParam;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+import com.wordnik.swagger.annotations.Authorization;
 
 import java.net.URI;
 import java.util.HashMap;
@@ -81,6 +87,10 @@ import org.springframework.security.access.prepost.PreAuthorize;
  * RESTful endpoint for managing a Flow Controller.
  */
 @Path("/controller")
+@Api(
+        value = "/controller",
+        description = "Provides realtime command and control of this NiFi instance"
+)
 public class ControllerResource extends ApplicationResource {
 
     private NiFiServiceFacade serviceFacade;
@@ -96,6 +106,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the Provenance sub-resource
      */
     @Path("/provenance")
+    @ApiOperation(
+            value = "Gets the provenance resource",
+            response = ProvenanceResource.class
+    )
     public ProvenanceResource getProvenanceResource() {
         return resourceContext.getResource(ProvenanceResource.class);
     }
@@ -106,6 +120,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the User sub-resource
      */
     @Path("/users")
+    @ApiOperation(
+            value = "Gets the user resource",
+            response = UserResource.class
+    )
     public UserResource getUserResource() {
         return resourceContext.getResource(UserResource.class);
     }
@@ -116,6 +134,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the User sub-resource
      */
     @Path("/user-groups")
+    @ApiOperation(
+            value = "Gets the user group resource",
+            response = UserGroupResource.class
+    )
     public UserGroupResource getUserGroupResource() {
         return resourceContext.getResource(UserGroupResource.class);
     }
@@ -126,6 +148,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the History sub-resource
      */
     @Path("/history")
+    @ApiOperation(
+            value = "Gets the history resource",
+            response = HistoryResource.class
+    )
     public HistoryResource getHistoryResource() {
         return resourceContext.getResource(HistoryResource.class);
     }
@@ -136,6 +162,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the History sub-resource
      */
     @Path("/bulletin-board")
+    @ApiOperation(
+            value = "Gets the bulletin board resource",
+            response = BulletinBoardResource.class
+    )
     public BulletinBoardResource getBulletinBoardResource() {
         return resourceContext.getResource(BulletinBoardResource.class);
     }
@@ -146,6 +176,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the Template sub-resource
      */
     @Path("/templates")
+    @ApiOperation(
+            value = "Gets the template resource",
+            response = TemplateResource.class
+    )
     public TemplateResource getTemplateResource() {
         return resourceContext.getResource(TemplateResource.class);
     }
@@ -156,6 +190,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the Snippets sub-resource
      */
     @Path("/snippets")
+    @ApiOperation(
+            value = "Gets the snippet resource",
+            response = SnippetResource.class
+    )
     public SnippetResource getSnippetResource() {
         return resourceContext.getResource(SnippetResource.class);
     }
@@ -166,6 +204,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the Controller Services sub-resource
      */
     @Path("/controller-services")
+    @ApiOperation(
+            value = "Gets the controller service resource",
+            response = ControllerServiceResource.class
+    )
     public ControllerServiceResource getControllerServiceResource() {
         return resourceContext.getResource(ControllerServiceResource.class);
     }
@@ -176,6 +218,10 @@ public class ControllerResource extends ApplicationResource {
      * @return the Reporting Tasks sub-resource
      */
     @Path("/reporting-tasks")
+    @ApiOperation(
+            value = "Gets the reporting task resource",
+            response = ReportingTaskResource.class
+    )
     public ReportingTaskResource getReportingTaskResource() {
         return resourceContext.getResource(ReportingTaskResource.class);
     }
@@ -187,7 +233,16 @@ public class ControllerResource extends ApplicationResource {
      * @return the Group sub-resource
      */
     @Path("/process-groups/{process-group-id}")
-    public ProcessGroupResource getGroupResource(@PathParam("process-group-id") String groupId) {
+    @ApiOperation(
+            value = "Gets the process group resource",
+            response = ProcessGroupResource.class
+    )
+    public ProcessGroupResource getGroupResource(
+            @ApiParam(
+                    value = "The id of the process group that is the parent of the requested resource(s).",
+                    required = true
+            )
+            @PathParam("process-group-id") String groupId) {
         ProcessGroupResource groupResource = resourceContext.getResource(ProcessGroupResource.class);
         groupResource.setGroupId(groupId);
         return groupResource;
@@ -215,10 +270,29 @@ public class ControllerResource extends ApplicationResource {
      * @return A controllerEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @PreAuthorize("hasRole('ROLE_NIFI')")
     @TypeHint(ControllerEntity.class)
-    public Response getController(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Returns the details about this NiFi necessary to communicate via site to site",
+            response = ControllerEntity.class,
+            authorizations = @Authorization(value = "NiFi", type = "ROLE_NIFI")
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getController(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         if (properties.isClusterManager()) {
             return clusterManager.applyRequest(HttpMethod.GET, getAbsolutePath(), getRequestParameters(true), getHeaders()).getResponse();
@@ -247,10 +321,28 @@ public class ControllerResource extends ApplicationResource {
      * @return A searchResultsEntity
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/search-results")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(SearchResultsEntity.class)
+    @ApiOperation(
+            value = "Performs a search against this NiFi using the specified search term",
+            response = SearchResultsEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response searchController(@QueryParam("q") @DefaultValue(StringUtils.EMPTY) String value) {
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -285,9 +377,36 @@ public class ControllerResource extends ApplicationResource {
     @Path("/archive")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ProcessGroupEntity.class)
+    @ApiOperation(
+            value = "Creates a new archive of this NiFi flow configuration",
+            notes = "This POST operation returns a URI that is not representative of the thing "
+                    + "that was actually created. The archive that is created cannot be referenced "
+                    + "at a later time, therefore there is no corresponding URI. Instead the "
+                    + "request URI is returned.",
+            response = ProcessGroupEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response createArchive(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The revision is used to verify the client is working with the latest version of the flow",
+                    required = true
+            )
             @FormParam(VERSION) LongParameter version,
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
             @FormParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
@@ -331,10 +450,28 @@ public class ControllerResource extends ApplicationResource {
      * @return A revisionEntity
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/revision")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(Entity.class)
+    @ApiOperation(
+            value = "Gets the current revision of this NiFi",
+            response = Entity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response getRevision() {
         // create the current revision
         final RevisionDTO revision = serviceFacade.getRevision();
@@ -354,11 +491,34 @@ public class ControllerResource extends ApplicationResource {
      * @return A controllerStatusEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ControllerStatusEntity.class)
-    public Response getControllerStatus(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Gets the current status of this NiFi",
+            response = Entity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getControllerStatus(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         final ControllerStatusDTO controllerStatus = serviceFacade.getControllerStatus();
 
@@ -382,11 +542,34 @@ public class ControllerResource extends ApplicationResource {
      * @return A countersEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/counters")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(CountersEntity.class)
-    public Response getCounters(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Gets the current counters for this NiFi",
+            response = Entity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getCounters(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         final CountersDTO countersReport = serviceFacade.getCounters();
 
@@ -412,12 +595,33 @@ public class ControllerResource extends ApplicationResource {
      * @return A counterEntity.
      */
     @PUT
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/counters/{id}")
-    @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
+    @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(CounterEntity.class)
+    @ApiOperation(
+            value = "Updates the specified counter. This will reset the counter value to 0",
+            response = CounterEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response updateCounter(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
             @FormParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @PathParam("id") String id) {
 
@@ -455,11 +659,36 @@ public class ControllerResource extends ApplicationResource {
      * @return A controllerConfigurationEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/config")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN', 'ROLE_NIFI')")
     @TypeHint(ControllerConfigurationEntity.class)
-    public Response getControllerConfig(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Retrieves the configuration for this NiFi",
+            response = ControllerConfigurationEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN"),
+                @Authorization(value = "ROLE_NIFI", type = "ROLE_NIFI")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getControllerConfig(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+
         // replicate if cluster manager
         if (properties.isClusterManager()) {
             return clusterManager.applyRequest(HttpMethod.GET, getAbsolutePath(), getRequestParameters(true), getHeaders()).getResponse();
@@ -551,9 +780,27 @@ public class ControllerResource extends ApplicationResource {
     @Path("/config")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ControllerConfigurationEntity.class)
+    @ApiOperation(
+            value = "Updates the configuration for this NiFi",
+            response = ControllerConfigurationEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response updateControllerConfig(
             @Context HttpServletRequest httpServletRequest,
-            ControllerConfigurationEntity configEntity) {
+            @ApiParam(
+                    value = "The controller configuration",
+                    required = true
+            ) ControllerConfigurationEntity configEntity) {
 
         if (configEntity == null || configEntity.getConfig() == null) {
             throw new IllegalArgumentException("Controller configuration must be specified");
@@ -606,11 +853,35 @@ public class ControllerResource extends ApplicationResource {
      * @return A authoritiesEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/authorities")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(AuthorityEntity.class)
-    public Response getAuthorities(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Retrieves the user details, including the authorities, about the user making the request",
+            response = AuthorityEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getAuthorities(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+
         // note that the cluster manager will handle this request directly
         final NiFiUser user = NiFiUserUtils.getNiFiUser();
         if (user == null) {
@@ -638,11 +909,34 @@ public class ControllerResource extends ApplicationResource {
      * @return A bannerEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/banners")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(BannerEntity.class)
-    public Response getBanners(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Retrieves the banners for this NiFi",
+            response = BannerEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getBanners(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -676,11 +970,34 @@ public class ControllerResource extends ApplicationResource {
      * @return A processorTypesEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/processor-types")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ProcessorTypesEntity.class)
-    public Response getProcessorTypes(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Retrieves the types of processors that this NiFi supports",
+            response = ProcessorTypesEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getProcessorTypes(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -708,12 +1025,38 @@ public class ControllerResource extends ApplicationResource {
      * @return A controllerServicesTypesEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/controller-service-types")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ControllerServiceTypesEntity.class)
+    @ApiOperation(
+            value = "Retrieves the types of controller services that this NiFi supports",
+            response = ControllerServiceTypesEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response getControllerServiceTypes(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "If specified, will only return controller services of this type",
+                    required = false
+            )
             @QueryParam("serviceType") String serviceType) {
 
         // replicate if cluster manager
@@ -741,11 +1084,34 @@ public class ControllerResource extends ApplicationResource {
      * @return A controllerServicesTypesEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/reporting-task-types")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ReportingTaskTypesEntity.class)
-    public Response getReportingTaskTypes(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Retrieves the types of reporting tasks that this NiFi supports",
+            response = ReportingTaskTypesEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getReportingTaskTypes(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -772,11 +1138,34 @@ public class ControllerResource extends ApplicationResource {
      * @return A prioritizerTypesEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/prioritizers")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(PrioritizerTypesEntity.class)
-    public Response getPrioritizers(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Retrieves the types of prioritizers that this NiFi supports",
+            response = PrioritizerTypesEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getPrioritizers(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -803,11 +1192,34 @@ public class ControllerResource extends ApplicationResource {
      * @return An aboutEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/about")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(AboutEntity.class)
-    public Response getAboutInfo(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Retrieves details about this NiFi to put in the About dialog",
+            response = AboutEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getAboutInfo(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -835,6 +1247,7 @@ public class ControllerResource extends ApplicationResource {
     }
 
     // setters
+    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }

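Taken together, the NIFI-292 changes above apply a single annotation pattern to every endpoint: @Api on the resource class, @ApiOperation describing the call and its response type, @Authorization entries mirroring the Spring Security roles in @PreAuthorize, @ApiResponses listing the standard 400/401/403/409 error codes, and @ApiParam on each query or form parameter. A condensed, self-contained sketch of that pattern on a hypothetical resource follows; the class, path, and parameter names are illustrative only and are not part of the commit.

import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
import com.wordnik.swagger.annotations.Authorization;

import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

// Hypothetical resource showing the swagger annotation pattern used in the commit.
@Path("/example")
@Api(value = "/example", description = "Illustrative resource, not part of the NiFi API")
public class ExampleResource {

    @GET
    @Consumes(MediaType.WILDCARD)
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    @ApiOperation(
            value = "Gets an example entity",
            response = String.class,
            authorizations = @Authorization(value = "Read Only", type = "ROLE_MONITOR")
    )
    @ApiResponses(
            value = {
                @ApiResponse(code = 400, message = "The request was invalid and should not be retried without modification."),
                @ApiResponse(code = 401, message = "Client could not be authenticated."),
                @ApiResponse(code = 403, message = "Client is not authorized to make this request.")
            }
    )
    public Response getExample(
            @ApiParam(
                    value = "Optional client id; included in the response whether specified or generated",
                    required = false
            )
            @QueryParam("clientId") @DefaultValue("") String clientId) {
        // Trivial body for illustration only.
        return Response.ok("example").build();
    }
}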
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
index 90d031d..f9bfda3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -74,6 +75,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Controller Service.
  */
+@Api(hidden = true)
 public class ControllerServiceResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(ControllerServiceResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
index fd97dca..73742e7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -61,6 +62,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Funnel.
  */
+@Api(hidden = true)
 public class FunnelResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(FunnelResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java
index 749863c..c334b37 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import javax.ws.rs.DELETE;
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
@@ -44,6 +45,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for querying the history of this Controller.
  */
+@Api(hidden = true)
 public class HistoryResource extends ApplicationResource {
 
     private NiFiServiceFacade serviceFacade;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
index 4e446fb..4c6b313 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -63,6 +64,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing an Input Port.
  */
+@Api(hidden = true)
 public class InputPortResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(InputPortResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java
index 6b12d0e..ca897ee 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -63,6 +64,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Label.
  */
+@Api(hidden = true)
 public class LabelResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(LabelResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java
index bb0eba9..0b2fe12 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DELETE;
 import javax.ws.rs.DefaultValue;
@@ -46,6 +47,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a cluster connection.
  */
+@Api(hidden = true)
 public class NodeResource extends ApplicationResource {
 
     private NiFiServiceFacade serviceFacade;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java
index a600d35..68b6b2c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -63,6 +64,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing an Output Port.
  */
+@Api(hidden = true)
 public class OutputPortResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(OutputPortResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java
index 2b3657e..73e2d66 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java
@@ -17,6 +17,11 @@
 package org.apache.nifi.web.api;
 
 import com.sun.jersey.api.core.ResourceContext;
+import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+import com.wordnik.swagger.annotations.Authorization;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -68,6 +73,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Group.
  */
+@Api(hidden = true)
 public class ProcessGroupResource extends ApplicationResource {
 
     private static final String VERBOSE = "false";
@@ -87,6 +93,10 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return the processor resource within the specified group
      */
     @Path("processors")
+    @ApiOperation(
+            value = "Gets the processor resource",
+            response = ProcessorResource.class
+    )
     public ProcessorResource getProcessorResource() {
         ProcessorResource processorResource = resourceContext.getResource(ProcessorResource.class);
         processorResource.setGroupId(groupId);
@@ -99,6 +109,10 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return the connection sub-resource within the specified group
      */
     @Path("connections")
+    @ApiOperation(
+            value = "Gets the connection resource",
+            response = ConnectionResource.class
+    )
     public ConnectionResource getConnectionResource() {
         ConnectionResource connectionResource = resourceContext.getResource(ConnectionResource.class);
         connectionResource.setGroupId(groupId);
@@ -111,6 +125,10 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return the input ports sub-resource within the specified group
      */
     @Path("input-ports")
+    @ApiOperation(
+            value = "Gets the input port resource",
+            response = InputPortResource.class
+    )
     public InputPortResource getInputPortResource() {
         InputPortResource inputPortResource = resourceContext.getResource(InputPortResource.class);
         inputPortResource.setGroupId(groupId);
@@ -123,6 +141,10 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return the output ports sub-resource within the specified group
      */
     @Path("output-ports")
+    @ApiOperation(
+            value = "Gets the output port resource",
+            response = OutputPortResource.class
+    )
     public OutputPortResource getOutputPortResource() {
         OutputPortResource outputPortResource = resourceContext.getResource(OutputPortResource.class);
         outputPortResource.setGroupId(groupId);
@@ -135,6 +157,10 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return the label sub-resource within the specified group
      */
     @Path("labels")
+    @ApiOperation(
+            value = "Gets the label resource",
+            response = LabelResource.class
+    )
     public LabelResource getLabelResource() {
         LabelResource labelResource = resourceContext.getResource(LabelResource.class);
         labelResource.setGroupId(groupId);
@@ -147,6 +173,10 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return the funnel sub-resource within the specified group
      */
     @Path("funnels")
+    @ApiOperation(
+            value = "Gets the funnel resource",
+            response = FunnelResource.class
+    )
     public FunnelResource getFunnelResource() {
         FunnelResource funnelResource = resourceContext.getResource(FunnelResource.class);
         funnelResource.setGroupId(groupId);
@@ -159,6 +189,10 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return the remote process group sub-resource within the specified group
      */
     @Path("remote-process-groups")
+    @ApiOperation(
+            value = "Gets the remote process group resource",
+            response = RemoteProcessGroupResource.class
+    )
     public RemoteProcessGroupResource getRemoteProcessGroupResource() {
         RemoteProcessGroupResource remoteProcessGroupResource = resourceContext.getResource(RemoteProcessGroupResource.class);
         remoteProcessGroupResource.setGroupId(groupId);
@@ -242,9 +276,29 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return A processGroupEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ProcessGroupEntity.class)
+    @ApiOperation(
+            value = "Gets the specified process group",
+            response = ProcessGroupEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response getProcessGroup(
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
             @QueryParam("recursive") @DefaultValue(RECURSIVE) Boolean recursive,
@@ -297,6 +351,22 @@ public class ProcessGroupResource extends ApplicationResource {
     @Path("/snippet-instance")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(FlowSnippetEntity.class)
+    @ApiOperation(
+            value = "Creates a new flow snippet",
+            response = FlowSnippetEntity.class,
+            authorizations = {
+                @Authorization(value = "ROLE_DFM", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response copySnippet(
             @Context HttpServletRequest httpServletRequest,
             @FormParam(VERSION) LongParameter version,
@@ -441,6 +511,7 @@ public class ProcessGroupResource extends ApplicationResource {
     @PUT
     @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ProcessGroupEntity.class)
     public Response updateProcessGroup(
@@ -481,6 +552,7 @@ public class ProcessGroupResource extends ApplicationResource {
     @PUT
     @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ProcessGroupEntity.class)
     public Response updateProcessGroup(
@@ -549,6 +621,7 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return A processGroupEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/process-group-references/{id}")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
@@ -597,6 +670,7 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return A controllerEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/process-group-references")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
@@ -904,6 +978,7 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return A processGroupEntity.
      */
     @DELETE
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/process-group-references/{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
@@ -956,6 +1031,7 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return A processGroupStatusEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN', 'ROLE_NIFI')")
@@ -999,6 +1075,7 @@ public class ProcessGroupResource extends ApplicationResource {
      * @return A processorEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/status/history")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")

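The ProcessGroupResource hunks above combine two pieces of the same pattern: child resources are annotated @Api(hidden = true) so they do not show up as top-level Swagger APIs, and the parent's JAX-RS sub-resource locators carry an @ApiOperation whose response type points at the child. A stripped-down sketch of that wiring follows; the class names are placeholders, and where NiFi obtains the child from Jersey's ResourceContext the sketch simply instantiates it to stay self-contained.

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.Response;

import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;

// Child resource: hidden from the top-level Swagger listing, reachable only through its parent.
@Api(hidden = true)
class ChildResource {

    private String groupId;

    public void setGroupId(final String groupId) {
        this.groupId = groupId;
    }

    @GET
    public Response list() {
        return Response.ok("children of " + groupId).build();
    }
}

// Parent resource: the locator itself is documented so Swagger still links to the child's operations.
@Path("/parent")
@Api(value = "/parent", description = "Hypothetical parent resource")
public class ParentResource {

    @Path("children")
    @ApiOperation(
            value = "Gets the child resource",
            response = ChildResource.class
    )
    public ChildResource getChildResource() {
        // NiFi uses resourceContext.getResource(ChildResource.class); instantiating directly keeps the sketch self-contained.
        final ChildResource child = new ChildResource();
        child.setGroupId("example-group");
        return child;
    }
}
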
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java
index 16c7e19..7cd544a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java
@@ -16,6 +16,12 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiParam;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+import com.wordnik.swagger.annotations.Authorization;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Arrays;
@@ -83,6 +89,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Processor.
  */
+@Api(hidden = true)
 public class ProcessorResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(ProcessorResource.class);
@@ -149,8 +156,27 @@ public class ProcessorResource extends ApplicationResource {
      */
     @GET
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ProcessorsEntity.class)
+    @ApiOperation(
+            value = "Gets all processors",
+            response = ProcessorsEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response getProcessors(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
@@ -189,6 +215,7 @@ public class ProcessorResource extends ApplicationResource {
     @POST
     @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ProcessorEntity.class)
     public Response createProcessor(
@@ -236,10 +263,31 @@ public class ProcessorResource extends ApplicationResource {
     @POST
     @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Path("") // necessary due to bug in swagger
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ProcessorEntity.class)
+    @ApiOperation(
+            value = "Creates a new processor",
+            response = ProcessorEntity.class,
+            authorizations = {
+                @Authorization(value = "ROLE_DFM", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response createProcessor(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The processor configuration details",
+                    required = true
+            )
             ProcessorEntity processorEntity) {
 
         if (processorEntity == null || processorEntity.getProcessor() == null) {
@@ -324,6 +372,24 @@ public class ProcessorResource extends ApplicationResource {
     @Path("/{id}")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ProcessorEntity.class)
+    @ApiOperation(
+            value = "Gets the specified processor",
+            response = ProcessorEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response getProcessor(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
 
         // replicate if cluster manager
@@ -675,10 +741,38 @@ public class ProcessorResource extends ApplicationResource {
     @Path("/{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ProcessorEntity.class)
+    @ApiOperation(
+            value = "Deletes the specified processor",
+            response = ProcessorEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response deleteProcessor(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The revision is used to verify the client is working with the latest version of the flow",
+                    required = false
+            )
             @QueryParam(VERSION) LongParameter version,
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "The processor id",
+                    required = true
+            )
             @PathParam("id") String id) {
 
         // replicate if cluster manager

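The ProcessorResource hunks also show the '@Path("") // necessary due to bug in swagger' workaround: methods served at the class-level path are given an explicit empty @Path so the Swagger scanner used here lists them alongside their @Path("/{id}") siblings. A hypothetical resource illustrating that workaround together with @ApiParam-annotated parameters:

import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;

// Hypothetical resource; the empty @Path mirrors the workaround called out in the diff comments.
@Path("/widgets")
@Api(value = "/widgets", description = "Hypothetical collection resource")
public class WidgetResource {

    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("") // collection-level method; the explicit empty path keeps the Swagger scanner from dropping it
    @ApiOperation(value = "Gets all widgets", response = String.class)
    public Response getWidgets() {
        return Response.ok("[]").build();
    }

    @DELETE
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}")
    @ApiOperation(value = "Deletes the specified widget", response = String.class)
    public Response deleteWidget(
            @ApiParam(
                    value = "The widget id",
                    required = true
            )
            @PathParam("id") String id) {
        return Response.ok(id).build();
    }
}
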
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java
index 4bfe3a0..574ec3d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -85,6 +86,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for querying data provenance.
  */
+@Api(hidden = true)
 public class ProvenanceResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(ProvenanceResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java
index c506b9b..0c70c3e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -69,6 +70,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Remote group.
  */
+@Api(hidden = true)
 public class RemoteProcessGroupResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(RemoteProcessGroupResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java
index 8aea04c..293b7b3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -70,6 +71,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Reporting Task.
  */
+@Api(hidden = true)
 public class ReportingTaskResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(ReportingTaskResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java
index 997fe4a..fb2ea01 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java
@@ -17,6 +17,7 @@
 package org.apache.nifi.web.api;
 
 import com.sun.jersey.api.core.ResourceContext;
+import com.wordnik.swagger.annotations.Api;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.HashMap;
@@ -61,6 +62,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Snippet.
  */
+@Api(hidden = true)
 public class SnippetResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(SnippetResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java
index f747c47..dd91872 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
@@ -40,6 +41,10 @@ import org.springframework.security.access.prepost.PreAuthorize;
  * RESTful endpoint for retrieving system diagnostics.
  */
 @Path("/system-diagnostics")
+@Api(
+        value = "/system-diagnostics",
+        description = "Provides diagnostics for the system NiFi is running on"
+)
 public class SystemDiagnosticsResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(SystemDiagnosticsResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java
index 00707be..7dd265e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java
@@ -17,6 +17,7 @@
 package org.apache.nifi.web.api;
 
 import com.sun.jersey.multipart.FormDataParam;
+import com.wordnik.swagger.annotations.Api;
 import java.io.InputStream;
 import java.net.URI;
 import java.util.Date;
@@ -61,6 +62,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Template.
  */
+@Api(hidden = true)
 public class TemplateResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(TemplateResource.class);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java
index f7b2009..2ea2c5d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -50,6 +51,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing this Controller's user groups.
  */
+@Api(hidden = true)
 public class UserGroupResource extends ApplicationResource {
 
     /*

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java
index 6dbb1a7..df8d85f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -62,6 +63,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing this Controller's users.
  */
+@Api(hidden = true)
 public class UserResource extends ApplicationResource {
 
     /*

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/endpoint.hbs
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/endpoint.hbs b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/endpoint.hbs
new file mode 100644
index 0000000..6296862
--- /dev/null
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/endpoint.hbs
@@ -0,0 +1,61 @@
+{{!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+        http://www.apache.org/licenses/LICENSE-2.0
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+--}}
+<div class="endpoints">
+    <span class="path hidden">{{@key}}</span>
+    {{#post}}
+        <div class="endpoint post">
+            <div class="operation-handle">
+                <div class="method">POST</div>
+                <div class="path mono"></div>
+                <div class="summary">{{summary}}</div>
+                <div class="clear"></div>
+            </div>
+            {{> operation}}
+        </div>
+    {{/post}}
+    {{#get}}
+        <div class="endpoint get">
+            <div class="operation-handle">
+                <div class="method">GET</div>
+                <div class="path mono"></div>
+                <div class="summary">{{summary}}</div>
+                <div class="clear"></div>
+            </div>
+            {{> operation}}
+        </div>
+    {{/get}}
+    {{#put}}
+        <div class="endpoint put">
+            <div class="operation-handle">
+                <div class="method">PUT</div>
+                <div class="path mono"></div>
+                <div class="summary">{{summary}}</div>
+                <div class="clear"></div>
+            </div>
+            {{> operation}}
+        </div>
+    {{/put}}
+    {{#delete}}
+        <div class="endpoint delete">
+            <div class="operation-handle">
+                <div class="method">DELETE</div>
+                <div class="path mono"></div>
+                <div class="summary">{{summary}}</div>
+                <div class="clear"></div>
+            </div>
+            {{> operation}}
+        </div>
+    {{/delete}}
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/example.hbs
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/example.hbs b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/example.hbs
new file mode 100644
index 0000000..a753cc3
--- /dev/null
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/resources/templates/example.hbs
@@ -0,0 +1,16 @@
+{{!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+        http://www.apache.org/licenses/LICENSE-2.0
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+--}}
+{{#each properties}}    {{#ifeq type "string"}}"{{@key}}": "string"{{/ifeq}}{{#ifeq type "integer"}}"{{@key}}": 0{{/ifeq}}{{#if $ref}}"{{@key}}": <span class="nested collapsed"><span class="nested-id hidden">{{basename $ref}}</span><span class="nested-example">&#123;&#8230;&#125;</span></span>{{/if}}
+{{/each}}
\ No newline at end of file

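endpoint.hbs and example.hbs are Handlebars templates: the first renders one documentation block per HTTP method (POST/GET/PUT/DELETE), the second renders a JSON-style example per property using custom ifeq and basename helpers. The diff does not show how the templates are compiled, so the snippet below is only a generic sketch using the jknack Handlebars library for Java with a cut-down inline template; it is not necessarily the mechanism this build actually uses.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import com.github.jknack.handlebars.Handlebars;
import com.github.jknack.handlebars.Template;

public class HandlebarsSketch {

    public static void main(final String[] args) throws IOException {
        final Handlebars handlebars = new Handlebars();

        // A cut-down version of the {{#get}} block from endpoint.hbs; the real template also
        // pulls in an "operation" partial that is not shown in this diff.
        final Template template = handlebars.compileInline(
                "{{#get}}<div class=\"endpoint get\"><div class=\"summary\">{{summary}}</div></div>{{/get}}");

        // Minimal context: a "get" operation with a summary, matching the {{#get}} section.
        final Map<String, Object> get = new HashMap<>();
        get.put("summary", "Gets the specified process group");
        final Map<String, Object> context = new HashMap<>();
        context.put("get", get);

        System.out.println(template.apply(context));
        // -> <div class="endpoint get"><div class="summary">Gets the specified process group</div></div>
    }
}
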

[27/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ReflectionUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ReflectionUtils.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ReflectionUtils.java
index d7573c3..5140e31 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ReflectionUtils.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ReflectionUtils.java
@@ -31,12 +31,9 @@ public class ReflectionUtils {
     private final static Logger LOG = LoggerFactory.getLogger(ReflectionUtils.class);
 
     /**
-     * Invokes all methods on the given instance that have been annotated with
-     * the given Annotation. If the signature of the method that is defined in
-     * <code>instance</code> uses 1 or more parameters, those parameters must be
-     * specified by the <code>args</code> parameter. However, if more arguments
-     * are supplied by the <code>args</code> parameter than needed, the extra
-     * arguments will be ignored.
+     * Invokes all methods on the given instance that have been annotated with the given Annotation. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters,
+     * those parameters must be specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be
+     * ignored.
      *
      * @param annotation annotation
      * @param instance instance
@@ -51,14 +48,9 @@ public class ReflectionUtils {
     }
 
     /**
-     * Invokes all methods on the given instance that have been annotated with
-     * the given preferredAnnotation and if no such method exists will invoke
-     * all methods on the given instance that have been annotated with the given
-     * alternateAnnotation, if any exists. If the signature of the method that
-     * is defined in <code>instance</code> uses 1 or more parameters, those
-     * parameters must be specified by the <code>args</code> parameter. However,
-     * if more arguments are supplied by the <code>args</code> parameter than
-     * needed, the extra arguments will be ignored.
+     * Invokes all methods on the given instance that have been annotated with the given preferredAnnotation and if no such method exists will invoke all methods on the given instance that have been
+     * annotated with the given alternateAnnotation, if any exists. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters, those parameters must be
+     * specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be ignored.
      *
      * @param preferredAnnotation preferred
      * @param alternateAnnotation alternate
@@ -134,67 +126,48 @@ public class ReflectionUtils {
     }
 
     /**
-     * Invokes all methods on the given instance that have been annotated with
-     * the given Annotation. If the signature of the method that is defined in
-     * <code>instance</code> uses 1 or more parameters, those parameters must be
-     * specified by the <code>args</code> parameter. However, if more arguments
-     * are supplied by the <code>args</code> parameter than needed, the extra
-     * arguments will be ignored.
+     * Invokes all methods on the given instance that have been annotated with the given Annotation. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters,
+     * those parameters must be specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be
+     * ignored.
      *
      * @param annotation annotation
      * @param instance instance
      * @param args args
-     * @return <code>true</code> if all appropriate methods were invoked and
-     * returned without throwing an Exception, <code>false</code> if one of the
-     * methods threw an Exception or could not be invoked; if <code>false</code>
-     * is returned, an error will have been logged.
+     * @return <code>true</code> if all appropriate methods were invoked and returned without throwing an Exception, <code>false</code> if one of the methods threw an Exception or could not be
+     * invoked; if <code>false</code> is returned, an error will have been logged.
      */
     public static boolean quietlyInvokeMethodsWithAnnotation(final Class<? extends Annotation> annotation, final Object instance, final Object... args) {
         return quietlyInvokeMethodsWithAnnotation(annotation, null, instance, null, args);
     }
 
     /**
-     * Invokes all methods on the given instance that have been annotated with
-     * the given Annotation. If the signature of the method that is defined in
-     * <code>instance</code> uses 1 or more parameters, those parameters must be
-     * specified by the <code>args</code> parameter. However, if more arguments
-     * are supplied by the <code>args</code> parameter than needed, the extra
-     * arguments will be ignored.
+     * Invokes all methods on the given instance that have been annotated with the given Annotation. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters,
+     * those parameters must be specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be
+     * ignored.
      *
      * @param annotation annotation
      * @param instance instance
      * @param logger logger
      * @param args args
-     * @return <code>true</code> if all appropriate methods were invoked and
-     * returned without throwing an Exception, <code>false</code> if one of the
-     * methods threw an Exception or could not be invoked; if <code>false</code>
-     * is returned, an error will have been logged.
+     * @return <code>true</code> if all appropriate methods were invoked and returned without throwing an Exception, <code>false</code> if one of the methods threw an Exception or could not be
+     * invoked; if <code>false</code> is returned, an error will have been logged.
      */
     public static boolean quietlyInvokeMethodsWithAnnotation(final Class<? extends Annotation> annotation, final Object instance, final ProcessorLog logger, final Object... args) {
         return quietlyInvokeMethodsWithAnnotation(annotation, null, instance, logger, args);
     }
 
     /**
-     * Invokes all methods on the given instance that have been annotated with
-     * the given preferredAnnotation and if no such method exists will invoke
-     * all methods on the given instance that have been annotated with the given
-     * alternateAnnotation, if any exists. If the signature of the method that
-     * is defined in <code>instance</code> uses 1 or more parameters, those
-     * parameters must be specified by the <code>args</code> parameter. However,
-     * if more arguments are supplied by the <code>args</code> parameter than
-     * needed, the extra arguments will be ignored.
+     * Invokes all methods on the given instance that have been annotated with the given preferredAnnotation and if no such method exists will invoke all methods on the given instance that have been
+     * annotated with the given alternateAnnotation, if any exists. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters, those parameters must be
+     * specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be ignored.
      *
      * @param preferredAnnotation preferred
      * @param alternateAnnotation alternate
      * @param instance instance
-     * @param logger the ProcessorLog to use for logging any errors. If null,
-     * will use own logger, but that will not generate bulletins or easily tie
-     * to the Processor's log messages.
+     * @param logger the ProcessorLog to use for logging any errors. If null, will use own logger, but that will not generate bulletins or easily tie to the Processor's log messages.
      * @param args args
-     * @return <code>true</code> if all appropriate methods were invoked and
-     * returned without throwing an Exception, <code>false</code> if one of the
-     * methods threw an Exception or could not be invoked; if <code>false</code>
-     * is returned, an error will have been logged.
+     * @return <code>true</code> if all appropriate methods were invoked and returned without throwing an Exception, <code>false</code> if one of the methods threw an Exception or could not be
+     * invoked; if <code>false</code> is returned, an error will have been logged.
      */
     public static boolean quietlyInvokeMethodsWithAnnotation(
             final Class<? extends Annotation> preferredAnnotation, final Class<? extends Annotation> alternateAnnotation, final Object instance, final ProcessorLog logger, final Object... args) {

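The reflowed Javadoc above spells out the ReflectionUtils contract: invoke every method on the instance that carries the given annotation, trim the supplied args down to each method's parameter count, ignore any extras, and have the "quietly" variants log failures and return false instead of throwing. A rough stand-alone sketch of that contract (not NiFi's actual implementation, which also handles the preferred/alternate annotation fallback and ProcessorLog-based error reporting):

import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;

public final class AnnotatedInvocationSketch {

    private AnnotatedInvocationSketch() {
    }

    /**
     * Invokes every public method on the instance annotated with the given annotation,
     * passing only as many of the supplied args as the method declares parameters.
     * Returns false if any invocation fails (the caller is expected to supply enough
     * args to cover each annotated method's parameters).
     */
    public static boolean quietlyInvoke(final Class<? extends Annotation> annotation, final Object instance, final Object... args) {
        boolean success = true;
        for (final Method method : instance.getClass().getMethods()) {
            if (!method.isAnnotationPresent(annotation)) {
                continue;
            }
            // Extra arguments beyond the method's parameter count are ignored, per the Javadoc.
            final Object[] trimmed = Arrays.copyOf(args, method.getParameterCount());
            try {
                method.invoke(instance, trimmed);
            } catch (final IllegalAccessException | InvocationTargetException e) {
                // The real implementation logs the failure (optionally via a ProcessorLog) instead of throwing.
                success = false;
            }
        }
        return success;
    }
}
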
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/SnippetUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/SnippetUtils.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/SnippetUtils.java
index 81d00e6..1521f54 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/SnippetUtils.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/SnippetUtils.java
@@ -35,8 +35,7 @@ import org.apache.nifi.web.api.dto.PositionDTO;
 public final class SnippetUtils {
 
     /**
-     * Moves the content of the specified template around the specified
-     * location.
+     * Moves the content of the specified template around the specified location.
      *
      * @param snippet snippet
      * @param x x location
@@ -97,8 +96,7 @@ public final class SnippetUtils {
     }
 
     /**
-     * Gets all components, but not connections, that are part of the specified
-     * template.
+     * Gets all components, but not connections, that are part of the specified template.
      *
      * @param contents snippet
      * @return component dtos


[15/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
index acabe08..e47d58c 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
@@ -49,7 +49,11 @@ import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.nio.charset.Charset;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -58,7 +62,8 @@ import java.util.regex.Pattern;
 @SideEffectFree
 @SupportsBatching
 @Tags({"Text", "Regular Expression", "Update", "Change", "Replace", "Modify", "Regex"})
-@CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of the content that matches the Regular Expression with some alternate value.")
+@CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of "
+        + "the content that matches the Regular Expression with some alternate value.")
 public class ReplaceText extends AbstractProcessor {
 
     //Constants
@@ -77,7 +82,8 @@ public class ReplaceText extends AbstractProcessor {
             .build();
     public static final PropertyDescriptor REPLACEMENT_VALUE = new PropertyDescriptor.Builder()
             .name("Replacement Value")
-            .description("The value to replace the regular expression with. Back-references to Regular Expression capturing groups are supported, but back-references that reference capturing groups that do not exist in the regular expression will be treated as literal value.")
+            .description("The value to replace the regular expression with. Back-references to Regular Expression capturing groups are supported, but "
+                    + "back-references that reference capturing groups that do not exist in the regular expression will be treated as a literal value.")
             .required(true)
             .defaultValue("$1")
             .addValidator(Validator.VALID)
@@ -92,15 +98,20 @@ public class ReplaceText extends AbstractProcessor {
             .build();
     public static final PropertyDescriptor MAX_BUFFER_SIZE = new PropertyDescriptor.Builder()
             .name("Maximum Buffer Size")
-            .description("Specifies the maximum amount of data to buffer (per file or per line, depending on the Evaluation Mode) in order to apply the regular expressions. If 'Entire Text' (in Evaluation Mode) is selected and the FlowFile is larger than this value, the FlowFile will be routed to 'failure'. "
-                    + "In 'Line-by-Line' Mode, if a single line is larger than this value, the FlowFile will be routed to 'failure'. A default value of 1 MB is provided, primarily for 'Entire Text' mode. In 'Line-by-Line' Mode, a value such as 8 KB or 16 KB is suggested. This value is ignored and the buffer is not used if 'Regular Expression' is set to '.*'")
+            .description("Specifies the maximum amount of data to buffer (per file or per line, depending on the Evaluation Mode) in order to "
+                    + "apply the regular expressions. If 'Entire Text' (in Evaluation Mode) is selected and the FlowFile is larger than this value, "
+                    + "the FlowFile will be routed to 'failure'. "
+                    + "In 'Line-by-Line' Mode, if a single line is larger than this value, the FlowFile will be routed to 'failure'. A default value "
+                    + "of 1 MB is provided, primarily for 'Entire Text' mode. In 'Line-by-Line' Mode, a value such as 8 KB or 16 KB is suggested. "
+                    + "This value is ignored and the buffer is not used if 'Regular Expression' is set to '.*'")
             .required(true)
             .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
             .defaultValue("1 MB")
             .build();
     public static final PropertyDescriptor EVALUATION_MODE = new PropertyDescriptor.Builder()
             .name("Evaluation Mode")
-            .description("Evaluate the 'Regular Expression' against each line (Line-by-Line) or buffer the entire file into memory (Entire Text) and then evaluate the 'Regular Expression'.")
+            .description("Evaluate the 'Regular Expression' against each line (Line-by-Line) or buffer the entire file into memory (Entire Text) and "
+                    + "then evaluate the 'Regular Expression'.")
             .allowableValues(LINE_BY_LINE, ENTIRE_TEXT)
             .defaultValue(ENTIRE_TEXT)
             .required(true)
@@ -108,7 +119,8 @@ public class ReplaceText extends AbstractProcessor {
     // Relationships
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
             .name("success")
-            .description("FlowFiles that have been successfully updated are routed to this relationship, as well as FlowFiles whose content does not match the given Regular Expression")
+            .description("FlowFiles that have been successfully updated are routed to this relationship, as well as FlowFiles whose content does not "
+                    + "match the given Regular Expression")
             .build();
     public static final Relationship REL_FAILURE = new Relationship.Builder()
             .name("failure")
@@ -205,7 +217,7 @@ public class ReplaceText extends AbstractProcessor {
                 final int originalBackRefIndex = Integer.parseInt(backRefNum);
                 int backRefIndex = originalBackRefIndex;
 
-                // if we have a replacement value like $123, and we have less than 123 capturing groups, then 
+                // if we have a replacement value like $123, and we have less than 123 capturing groups, then
                 // we want to truncate the 3 and use capturing group 12; if we have less than 12 capturing groups,
                 // then we want to truncate the 2 and use capturing group 1; if we don't have a capturing group then
                 // we want to truncate the 1 and get 0.

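The truncation rule in the comment above can be made concrete with a small, self-contained sketch (names invented for illustration; this is not the ReplaceText code itself):

    public class BackReferenceSketch {
        // Given a requested back-reference such as $123 and the number of capturing groups
        // actually present, drop trailing digits until the index fits, or fall back to 0.
        static int resolveBackReference(final int requestedGroup, final int numCapturingGroups) {
            int backRef = requestedGroup;
            while (backRef > numCapturingGroups && backRef >= 10) {
                backRef /= 10; // truncate the last digit: 123 -> 12 -> 1
            }
            return backRef <= numCapturingGroups ? backRef : 0;
        }

        public static void main(final String[] args) {
            System.out.println(resolveBackReference(123, 12)); // prints 12
            System.out.println(resolveBackReference(123, 1));  // prints 1
            System.out.println(resolveBackReference(123, 0));  // prints 0
        }
    }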
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
index 5be2b69..04a9c56 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
@@ -68,7 +68,8 @@ import org.apache.commons.lang3.StringUtils;
 @SideEffectFree
 @SupportsBatching
 @Tags({"Text", "Regular Expression", "Update", "Change", "Replace", "Modify", "Regex", "Mapping"})
-@CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of the content that matches the Regular Expression with some alternate value provided in a mapping file.")
+@CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of the content that "
+        + "matches the Regular Expression with some alternate value provided in a mapping file.")
 public class ReplaceTextWithMapping extends AbstractProcessor {
 
     public static final PropertyDescriptor REGEX = new PropertyDescriptor.Builder()
@@ -109,7 +110,8 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
             .build();
     public static final PropertyDescriptor MAX_BUFFER_SIZE = new PropertyDescriptor.Builder()
             .name("Maximum Buffer Size")
-            .description("Specifies the maximum amount of data to buffer (per file) in order to apply the regular expressions. If a FlowFile is larger than this value, the FlowFile will be routed to 'failure'")
+            .description("Specifies the maximum amount of data to buffer (per file) in order to apply the regular expressions. If a FlowFile is larger "
+                    + "than this value, the FlowFile will be routed to 'failure'")
             .required(true)
             .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
             .defaultValue("1 MB")
@@ -270,13 +272,6 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
         }
     }
 
-    /**
-     * Loads a file containing mappings.
-     *
-     * @param is
-     * @return
-     * @throws IOException
-     */
     protected Map<String, String> loadMappingFile(InputStream is) throws IOException {
         Map<String, String> mapping = new HashMap<>();
         BufferedReader reader = new BufferedReader(new InputStreamReader(is));

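Although the Javadoc for loadMappingFile was removed above, a rough sketch of a mapping loader may help readers. The file format assumed here (one whitespace-delimited key/value pair per line) is an illustration only and is not taken from the processor itself:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.util.HashMap;
    import java.util.Map;

    public class MappingLoaderSketch {
        static Map<String, String> loadMapping(final InputStream is) throws IOException {
            final Map<String, String> mapping = new HashMap<>();
            final BufferedReader reader = new BufferedReader(new InputStreamReader(is));
            String line;
            while ((line = reader.readLine()) != null) {
                final String[] fields = line.trim().split("\\s+", 2); // assumed format: key, then value
                if (fields.length == 2) {
                    mapping.put(fields[0], fields[1]);
                }
            }
            return mapping;
        }
    }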
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
index 8b6a7b4..7cba650 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
@@ -77,7 +77,8 @@ public class RouteOnAttribute extends AbstractProcessor {
             "A copy of the FlowFile will be routed to each relationship whose corresponding expression evaluates to 'true'");
     public static final AllowableValue ROUTE_ALL_MATCH = new AllowableValue(routeAllMatchValue, "Route to 'matched' if all match",
             "Requires that all user-defined expressions evaluate to 'true' for the FlowFile to be considered a match");
-    public static final AllowableValue ROUTE_ANY_MATCHES = new AllowableValue(routeAnyMatches, // keep the word 'match' instead of 'matched' to maintain backward compatibility (there was a typo originally)
+    // keep the word 'match' instead of 'matched' to maintain backward compatibility (there was a typo originally)
+    public static final AllowableValue ROUTE_ANY_MATCHES = new AllowableValue(routeAnyMatches,
            "Route to 'matched' if any matches",
            "Requires that at least one user-defined expression evaluate to 'true' for the FlowFile to be considered a match");
 
@@ -243,8 +244,7 @@ public class RouteOnAttribute extends AbstractProcessor {
             }
 
             //now transfer the original flow file
-            logger.
-                    info("Routing {} to {}", new Object[]{flowFile, firstRelationship});
+            logger.info("Routing {} to {}", new Object[]{flowFile, firstRelationship});
             session.getProvenanceReporter().route(flowFile, firstRelationship);
             flowFile = session.putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, firstRelationship.getName());
             session.transfer(flowFile, firstRelationship);

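As a usage illustration of the routing strategies described above, the sketch below exercises the "Route to Property name" strategy (believed to be the default) with the mock framework; the dynamic property name 'is-red' and the attribute 'color' are invented for this example:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.nifi.processors.standard.RouteOnAttribute;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;

    public class RouteOnAttributeUsageSketch {
        public static void main(final String[] args) {
            final TestRunner runner = TestRunners.newTestRunner(new RouteOnAttribute());
            // each dynamic property becomes its own relationship; the Expression Language
            // value decides whether an incoming FlowFile is routed to it
            runner.setProperty("is-red", "${color:equals('red')}");

            final Map<String, String> attributes = new HashMap<>();
            attributes.put("color", "red");
            runner.enqueue(new byte[0], attributes);
            runner.run();

            runner.assertTransferCount("is-red", 1);
        }
    }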
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
index 46629fe..1f0fc7b 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
@@ -86,7 +86,10 @@ public class ScanAttribute extends AbstractProcessor {
             .build();
     public static final PropertyDescriptor DICTIONARY_FILTER = new PropertyDescriptor.Builder()
             .name("Dictionary Filter Pattern")
-            .description("A Regular Expression that will be applied to each line in the dictionary file. If the regular expression does not match the line, the line will not be included in the list of terms to search for. If a Matching Group is specified, only the portion of the term that matches that Matching Group will be used instead of the entire term. If not specified, all terms in the dictionary will be used and each term will consist of the text of the entire line in the file")
+            .description("A Regular Expression that will be applied to each line in the dictionary file. If the regular expression does not "
+                    + "match the line, the line will not be included in the list of terms to search for. If a Matching Group is specified, only the "
+                    + "portion of the term that matches that Matching Group will be used instead of the entire term. If not specified, all terms in "
+                    + "the dictionary will be used and each term will consist of the text of the entire line in the file")
             .required(false)
             .addValidator(StandardValidators.createRegexValidator(0, 1, false))
             .defaultValue(null)

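The Dictionary Filter Pattern behaviour described above amounts to the following filtering logic, shown here as a stand-alone sketch rather than the ScanAttribute code itself:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class DictionaryFilterSketch {
        static List<String> filterTerms(final List<String> dictionaryLines, final Pattern filter) {
            final List<String> terms = new ArrayList<>();
            for (final String line : dictionaryLines) {
                if (filter == null) {
                    terms.add(line); // no filter: every full line is a term
                    continue;
                }
                final Matcher matcher = filter.matcher(line);
                if (matcher.matches()) {
                    // if a Matching Group is defined, keep only that portion of the term
                    terms.add(matcher.groupCount() >= 1 ? matcher.group(1) : line);
                }
            }
            return terms;
        }
    }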
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
index cfa0bda..3da1bd5 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
@@ -225,8 +225,7 @@ public class SplitContent extends AbstractProcessor {
                         }
 
                         bytesRead++;
-                        boolean matched = buffer.
-                                addAndCompare((byte) (nextByte & 0xFF));
+                        boolean matched = buffer.addAndCompare((byte) (nextByte & 0xFF));
                         if (matched) {
                             long splitLength;
 
@@ -255,8 +254,7 @@ public class SplitContent extends AbstractProcessor {
             FlowFile clone = session.clone(flowFile);
             session.transfer(flowFile, REL_ORIGINAL);
             session.transfer(clone, REL_SPLITS);
-            logger.
-                    info("Found no match for {}; transferring original 'original' and transferring clone {} to 'splits'", new Object[]{flowFile, clone});
+            logger.info("Found no match for {}; transferring original to 'original' and transferring clone {} to 'splits'", new Object[]{flowFile, clone});
             return;
         }
 
@@ -303,8 +301,7 @@ public class SplitContent extends AbstractProcessor {
      * @param splits splits
      */
     private void finishFragmentAttributes(final ProcessSession session, final FlowFile source, final List<FlowFile> splits) {
-        final String originalFilename = source.
-                getAttribute(CoreAttributes.FILENAME.key());
+        final String originalFilename = source.getAttribute(CoreAttributes.FILENAME.key());
 
         final String fragmentId = UUID.randomUUID().toString();
         final ArrayList<FlowFile> newList = new ArrayList<>(splits);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
index d641274..56bd729 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
@@ -140,15 +140,6 @@ public class SplitText extends AbstractProcessor {
         return properties;
     }
 
-    /**
-     * Reads up to the given maximum number of lines, copying them to out
-     *
-     * @param in
-     * @param maxNumLines
-     * @param out
-     * @return the number of lines actually copied
-     * @throws IOException
-     */
     private int readLines(final InputStream in, final int maxNumLines, final OutputStream out, final boolean keepAllNewLines) throws IOException {
         int numLines = 0;
         for (int i = 0; i < maxNumLines; i++) {
@@ -279,7 +270,7 @@ public class SplitText extends AbstractProcessor {
                                 if (linesCopied.get() > 0) {
                                     splits.add(splitFile);
                                 } else {
-                                    // if the number of content lines is a multiple of the SPLIT_LINE_COUNT, 
+                                    // if the number of content lines is a multiple of the SPLIT_LINE_COUNT,
                                     // the last flow file will contain just a header; don't forward that one
                                     session.remove(splitFile);
                                 }
@@ -341,13 +332,6 @@ public class SplitText extends AbstractProcessor {
         session.transfer(splits, REL_SPLITS);
     }
 
-    /**
-     * Apply split index, count and other attributes.
-     *
-     * @param session
-     * @param source
-     * @param unpacked
-     */
     private void finishFragmentAttributes(final ProcessSession session, final FlowFile source, final List<FlowFile> splits) {
         final String originalFilename = source.getAttribute(CoreAttributes.FILENAME.key());
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
index adbfff2..617fcbe 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
@@ -68,7 +68,8 @@ public class SplitXml extends AbstractProcessor {
 
     public static final PropertyDescriptor SPLIT_DEPTH = new PropertyDescriptor.Builder()
             .name("Split Depth")
-            .description("Indicates the XML-nesting depth to start splitting XML fragments. A depth of 1 means split the root's children, whereas a depth of 2 means split the root's children's children and so forth.")
+            .description("Indicates the XML-nesting depth to start splitting XML fragments. A depth of 1 means split the root's children, whereas a depth of"
+                    + " 2 means split the root's children's children and so forth.")
             .required(true)
             .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
             .defaultValue("1")

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
index 2abf4a1..fc4730c 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
@@ -132,35 +132,33 @@ public class TransformXml extends AbstractProcessor {
         final StopWatch stopWatch = new StopWatch(true);
 
         try {
-            FlowFile transformed = session.
-                    write(original, new StreamCallback() {
-                        @Override
-                        public void process(final InputStream rawIn, final OutputStream out) throws IOException {
-                            try (final InputStream in = new BufferedInputStream(rawIn)) {
-
-                                File stylesheet = new File(context.getProperty(XSLT_FILE_NAME).getValue());
-                                StreamSource styleSource = new StreamSource(stylesheet);
-                                TransformerFactory tfactory = new net.sf.saxon.TransformerFactoryImpl();
-                                Transformer transformer = tfactory.newTransformer(styleSource);
-
-                                // pass all dynamic properties to the transformer
-                                for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().
-                                entrySet()) {
-                                    if (entry.getKey().isDynamic()) {
-                                        String value = context.newPropertyValue(entry.getValue()).evaluateAttributeExpressions(original).getValue();
-                                        transformer.setParameter(entry.getKey().getName(), value);
-                                    }
-                                }
-
-                                // use a StreamSource with Saxon
-                                StreamSource source = new StreamSource(in);
-                                StreamResult result = new StreamResult(out);
-                                transformer.transform(source, result);
-                            } catch (final Exception e) {
-                                throw new IOException(e);
+            FlowFile transformed = session.write(original, new StreamCallback() {
+                @Override
+                public void process(final InputStream rawIn, final OutputStream out) throws IOException {
+                    try (final InputStream in = new BufferedInputStream(rawIn)) {
+
+                        File stylesheet = new File(context.getProperty(XSLT_FILE_NAME).getValue());
+                        StreamSource styleSource = new StreamSource(stylesheet);
+                        TransformerFactory tfactory = new net.sf.saxon.TransformerFactoryImpl();
+                        Transformer transformer = tfactory.newTransformer(styleSource);
+
+                        // pass all dynamic properties to the transformer
+                        for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
+                            if (entry.getKey().isDynamic()) {
+                                String value = context.newPropertyValue(entry.getValue()).evaluateAttributeExpressions(original).getValue();
+                                transformer.setParameter(entry.getKey().getName(), value);
                             }
                         }
-                    });
+
+                        // use a StreamSource with Saxon
+                        StreamSource source = new StreamSource(in);
+                        StreamResult result = new StreamResult(out);
+                        transformer.transform(source, result);
+                    } catch (final Exception e) {
+                        throw new IOException(e);
+                    }
+                }
+            });
             session.transfer(transformed, REL_SUCCESS);
             session.getProvenanceReporter().modifyContent(transformed, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
             logger.info("Transformed {}", new Object[]{original});

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
index 6f228b2..ff4d936 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
@@ -68,14 +68,24 @@ import org.apache.nifi.util.ObjectHolder;
 @SideEffectFree
 @SupportsBatching
 @Tags({"Unpack", "un-merge", "tar", "zip", "archive", "flowfile-stream", "flowfile-stream-v3"})
-@CapabilityDescription("Unpacks the content of FlowFiles that have been packaged with one of several different Packaging Formats, emitting one to many FlowFiles for each input FlowFile")
-@ReadsAttribute(attribute = "mime.type", description = "If the <Packaging Format> property is set to use mime.type attribute, this attribute is used to determine the FlowFile's MIME Type. In this case, if the attribute is set to application/tar, the TAR Packaging Format will be used. If the attribute is set to application/zip, the ZIP Packaging Format will be used. If the attribute is set to application/flowfile-v3 or application/flowfile-v2 or application/flowfile-v1, the appropriate FlowFile Packaging Format will be used. If this attribute is missing, the FlowFile will be routed to 'failure'. Otherwise, if the attribute's value is not one of those mentioned above, the FlowFile will be routed to 'success' without being unpacked")
+@CapabilityDescription("Unpacks the content of FlowFiles that have been packaged with one of several different Packaging Formats, emitting one to many "
+        + "FlowFiles for each input FlowFile")
+@ReadsAttribute(attribute = "mime.type", description = "If the <Packaging Format> property is set to use mime.type attribute, this attribute is used "
+        + "to determine the FlowFile's MIME Type. In this case, if the attribute is set to application/tar, the TAR Packaging Format will be used. If "
+        + "the attribute is set to application/zip, the ZIP Packaging Format will be used. If the attribute is set to application/flowfile-v3 or "
+        + "application/flowfile-v2 or application/flowfile-v1, the appropriate FlowFile Packaging Format will be used. If this attribute is missing, "
+        + "the FlowFile will be routed to 'failure'. Otherwise, if the attribute's value is not one of those mentioned above, the FlowFile will be "
+        + "routed to 'success' without being unpacked")
 @WritesAttributes({
-    @WritesAttribute(attribute = "mime.type", description = "If the FlowFile is successfully unpacked, its MIME Type is no longer known, so the mime.type attribute is set to application/octet-stream."),
-    @WritesAttribute(attribute = "fragment.identifier", description = "All unpacked FlowFiles produced from the same parent FlowFile will have the same randomly generated UUID added for this attribute"),
-    @WritesAttribute(attribute = "fragment.index", description = "A one-up number that indicates the ordering of the unpacked FlowFiles that were created from a single parent FlowFile"),
+    @WritesAttribute(attribute = "mime.type", description = "If the FlowFile is successfully unpacked, its MIME Type is no longer known, so the mime.type "
+            + "attribute is set to application/octet-stream."),
+    @WritesAttribute(attribute = "fragment.identifier", description = "All unpacked FlowFiles produced from the same parent FlowFile will have the same randomly generated "
+            + "UUID added for this attribute"),
+    @WritesAttribute(attribute = "fragment.index", description = "A one-up number that indicates the ordering of the unpacked FlowFiles that were created from a single "
+            + "parent FlowFile"),
     @WritesAttribute(attribute = "fragment.count", description = "The number of unpacked FlowFiles generated from the parent FlowFile"),
-    @WritesAttribute(attribute = "segment.original.filename ", description = "The filename of the parent FlowFile. Extensions of .tar, .zip or .pkg are removed because the MergeContent processor automatically adds those extensions if it is used to rebuild the original FlowFile")})
+    @WritesAttribute(attribute = "segment.original.filename ", description = "The filename of the parent FlowFile. Extensions of .tar, .zip or .pkg are removed because "
+            + "the MergeContent processor automatically adds those extensions if it is used to rebuild the original FlowFile")})
 @SeeAlso(MergeContent.class)
 public class UnpackContent extends AbstractProcessor {
 
@@ -380,8 +390,7 @@ public class UnpackContent extends AbstractProcessor {
                                 mapAttributes(attributes, "content-encoding", CoreAttributes.MIME_TYPE.key());
                                 mapAttributes(attributes, "content-type", CoreAttributes.MIME_TYPE.key());
 
-                                if (!attributes.
-                                        containsKey(CoreAttributes.MIME_TYPE.key())) {
+                                if (!attributes.containsKey(CoreAttributes.MIME_TYPE.key())) {
                                     attributes.put(CoreAttributes.MIME_TYPE.key(), OCTET_STREAM);
                                 }
 
@@ -396,26 +405,12 @@ public class UnpackContent extends AbstractProcessor {
         }
     }
 
-    /**
-     * Maps attributes from legacy nifi to the new naming scheme
-     *
-     * @param attributes
-     * @param oldKey
-     * @param newKey
-     */
     private static void mapAttributes(final Map<String, String> attributes, final String oldKey, final String newKey) {
         if (!attributes.containsKey(newKey) && attributes.containsKey(oldKey)) {
             attributes.put(newKey, attributes.get(oldKey));
         }
     }
 
-    /**
-     * If the unpacked flowfiles contain fragment index attributes, then we need to apply fragment count and other attributes for completeness.
-     *
-     * @param session
-     * @param source
-     * @param unpacked
-     */
     private void finishFragmentAttributes(final ProcessSession session, final FlowFile source, final List<FlowFile> unpacked) {
         // first pass verifies all FlowFiles have the FRAGMENT_INDEX attribute and gets the total number of fragments
         int fragmentCount = 0;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
index 3f761d1..d505898 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
@@ -58,21 +58,21 @@ import org.xml.sax.SAXException;
 @CapabilityDescription("Validates the contents of FlowFiles against a user-specified XML Schema file")
 public class ValidateXml extends AbstractProcessor {
 
-    public static final PropertyDescriptor SCHEMA_FILE = new PropertyDescriptor.Builder().
-            name("Schema File").
-            description("The path to the Schema file that is to be used for validation").
-            required(true).
-            addValidator(StandardValidators.FILE_EXISTS_VALIDATOR).
-            build();
-
-    public static final Relationship REL_VALID = new Relationship.Builder().
-            name("valid").
-            description("FlowFiles that are successfully validated against the schema are routed to this relationship").
-            build();
-    public static final Relationship REL_INVALID = new Relationship.Builder().
-            name("invalid").
-            description("FlowFiles that are not valid according to the specified schema are routed to this relationship").
-            build();
+    public static final PropertyDescriptor SCHEMA_FILE = new PropertyDescriptor.Builder()
+            .name("Schema File")
+            .description("The path to the Schema file that is to be used for validation")
+            .required(true)
+            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
+            .build();
+
+    public static final Relationship REL_VALID = new Relationship.Builder()
+            .name("valid")
+            .description("FlowFiles that are successfully validated against the schema are routed to this relationship")
+            .build();
+    public static final Relationship REL_INVALID = new Relationship.Builder()
+            .name("invalid")
+            .description("FlowFiles that are not valid according to the specified schema are routed to this relationship")
+            .build();
 
     private static final String SCHEMA_LANGUAGE = "http://www.w3.org/2001/XMLSchema";
 
@@ -105,10 +105,8 @@ public class ValidateXml extends AbstractProcessor {
     @OnScheduled
     public void parseSchema(final ProcessContext context) throws IOException, SAXException {
         try {
-            final File file = new File(context.getProperty(SCHEMA_FILE).
-                    getValue());
-            final SchemaFactory schemaFactory = SchemaFactory.
-                    newInstance(SCHEMA_LANGUAGE);
+            final File file = new File(context.getProperty(SCHEMA_FILE).getValue());
+            final SchemaFactory schemaFactory = SchemaFactory.newInstance(SCHEMA_LANGUAGE);
             final Schema schema = schemaFactory.newSchema(file);
             this.schemaRef.set(schema);
         } catch (final SAXException e) {
@@ -136,23 +134,18 @@ public class ValidateXml extends AbstractProcessor {
                         validator.validate(new StreamSource(in));
                     } catch (final IllegalArgumentException | SAXException e) {
                         valid.set(false);
-                        logger.
-                                debug("Failed to validate {} against schema due to {}", new Object[]{flowFile, e});
+                        logger.debug("Failed to validate {} against schema due to {}", new Object[]{flowFile, e});
                     }
                 }
             });
 
             if (valid.get()) {
-                logger.
-                        info("Successfully validated {} against schema; routing to 'valid'", new Object[]{flowFile});
-                session.getProvenanceReporter().
-                        route(flowFile, REL_VALID);
+                logger.info("Successfully validated {} against schema; routing to 'valid'", new Object[]{flowFile});
+                session.getProvenanceReporter().route(flowFile, REL_VALID);
                 session.transfer(flowFile, REL_VALID);
             } else {
-                logger.
-                        info("Failed to validate {} against schema; routing to 'invalid'", new Object[]{flowFile});
-                session.getProvenanceReporter().
-                        route(flowFile, REL_INVALID);
+                logger.info("Failed to validate {} against schema; routing to 'invalid'", new Object[]{flowFile});
+                session.getProvenanceReporter().route(flowFile, REL_INVALID);
                 session.transfer(flowFile, REL_INVALID);
             }
         }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
index ab12be2..7dd6797 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
@@ -50,11 +50,6 @@ public class ContentAcknowledgmentServlet extends HttpServlet {
     private ProcessorLog logger;
     private ConcurrentMap<String, FlowFileEntryTimeWrapper> flowFileMap;
 
-    /**
-     *
-     * @param config
-     * @throws ServletException
-     */
     @SuppressWarnings("unchecked")
     @Override
     public void init(final ServletConfig config) throws ServletException {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
index 81986ba..6a8f32f 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
@@ -94,11 +94,6 @@ public class ListenHTTPServlet extends HttpServlet {
     private ConcurrentMap<String, FlowFileEntryTimeWrapper> flowFileMap;
     private StreamThrottler streamThrottler;
 
-    /**
-     *
-     * @param config
-     * @throws ServletException
-     */
     @SuppressWarnings("unchecked")
     @Override
     public void init(final ServletConfig config) throws ServletException {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java
index c9d906d..4ecccf8 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java
@@ -41,11 +41,11 @@ public class Bin {
     /**
      * Constructs a new bin
      *
-     * @param minSizeBytes
-     * @param maxSizeBytes
-     * @param minEntries
-     * @param maxEntries
-     * @param fileCountAttribute
+     * @param minSizeBytes min bytes
+     * @param maxSizeBytes max bytes
+     * @param minEntries min entries
+     * @param maxEntries max entries
+     * @param fileCountAttribute num files
      * @throws IllegalArgumentException if the min is not less than or equal to the max.
      */
     public Bin(final long minSizeBytes, final long maxSizeBytes, final int minEntries, final int maxEntries, final String fileCountAttribute) {
@@ -75,7 +75,7 @@ public class Bin {
     /**
      * Indicates enough size exists to meet the minimum requirements
      *
-     * @return
+     * @return true if full enough
      */
     public boolean isFullEnough() {
         return isFull() || (size >= minimumSizeBytes && (binContents.size() >= minimumEntries));
@@ -84,8 +84,8 @@ public class Bin {
     /**
      * Determines if this bin is older than the time specified.
      *
-     * @param duration
-     * @param unit
+     * @param duration duration
+     * @param unit unit
      * @return true if this bin is older than the length of time given; false otherwise
      */
     public boolean isOlderThan(final int duration, final TimeUnit unit) {
@@ -96,8 +96,8 @@ public class Bin {
     /**
      * Determines if this bin is older than the specified bin
      *
-     * @param other
-     * @return
+     * @param other other bin
+     * @return true if this is older than given bin
      */
     public boolean isOlderThan(final Bin other) {
         return creationMomentEpochNs < other.creationMomentEpochNs;
@@ -106,7 +106,7 @@ public class Bin {
     /**
      * If this bin has enough room for the size of the given flow file then it is added otherwise it is not
      *
-     * @param flowFile
+     * @param flowFile flowfile to offer
      * @param session the ProcessSession to which the FlowFile belongs
      * @return true if added; false otherwise
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java
index 9d0e857..90440a5 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java
@@ -152,7 +152,7 @@ public class BinManager {
      * <p/>
      * @param relaxFullnessConstraint if false will require bins to be full before considered ready; if true bins only have to meet their minimum size criteria or be 'old' and then they'll be
      * considered ready
-     * @return
+     * @return bins that are considered full
      */
     public Collection<Bin> removeReadyBins(boolean relaxFullnessConstraint) {
         final Map<String, List<Bin>> newGroupMap = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java
index 8520813..4bca491 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java
@@ -42,12 +42,6 @@ public class DocumentReaderCallback implements InputStreamCallback {
         this.isNamespaceAware = isNamespaceAware;
     }
 
-    /**
-     * Loads the Document from the specified stream.
-     *
-     * @param stream
-     * @throws IOException
-     */
     @Override
     public void process(final InputStream stream) throws IOException {
         try {
@@ -63,9 +57,7 @@ public class DocumentReaderCallback implements InputStreamCallback {
     }
 
     /**
-     * Returns the document.
-     *
-     * @return
+     * @return the document
      */
     public Document getDocument() {
         return document;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
index 21e6b4c..41a42bb 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
@@ -305,8 +305,7 @@ public class FTPTransfer implements FileTransfer {
         final FTPFile[] files = client.listFiles(path);
         FTPFile matchingFile = null;
         for (final FTPFile file : files) {
-            if (file.getName().
-                    equalsIgnoreCase(remoteFileName)) {
+            if (file.getName().equalsIgnoreCase(remoteFileName)) {
                 matchingFile = file;
                 break;
             }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java
index 0e6a26f..adaba5c 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java
@@ -75,7 +75,7 @@ public class FTPUtils {
      * value of zero means do not timeout. Users should probably set a value here unless using very reliable communications links or else risk indefinite hangs that require a restart.</li>
      * </ul>
      *
-     * @param conf
+     * @param conf conf
      * @param monitor if provided will be used to monitor FTP commands processed but may be null
      * @return FTPClient connected to FTP server as configured
      * @throws NullPointerException if either argument is null

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java
index ece0e59..f0061b8 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java
@@ -136,7 +136,10 @@ public interface FileTransfer extends Closeable {
             .build();
     public static final PropertyDescriptor REMOTE_POLL_BATCH_SIZE = new PropertyDescriptor.Builder()
             .name("Remote Poll Batch Size")
-            .description("The value specifies how many file paths to find in a given directory on the remote system when doing a file listing. This value in general should not need to be modified but when polling against a remote system with a tremendous number of files this value can be critical.  Setting this value too high can result very poor performance and setting it too low can cause the flow to be slower than normal.")
+            .description("The value specifies how many file paths to find in a given directory on the remote system when doing a file listing. This value "
+                    + "in general should not need to be modified, but when polling against a remote system with a tremendous number of files this value can "
+                    + "be critical. Setting this value too high can result in very poor performance and setting it too low can cause the flow to be slower "
+                    + "than normal.")
             .defaultValue("5000")
             .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
             .required(true)
@@ -194,41 +197,53 @@ public interface FileTransfer extends Closeable {
             .build();
     public static final PropertyDescriptor DOT_RENAME = new PropertyDescriptor.Builder()
             .name("Dot Rename")
-            .description("If true, then the filename of the sent file is prepended with a \".\" and then renamed back to the original once the file is completely sent. Otherwise, there is no rename. This property is ignored if the Temporary Filename property is set.")
+            .description("If true, then the filename of the sent file is prepended with a \".\" and then renamed back to the "
+                    + "original once the file is completely sent. Otherwise, there is no rename. This property is ignored if the "
+                    + "Temporary Filename property is set.")
             .allowableValues("true", "false")
             .defaultValue("true")
             .build();
     public static final PropertyDescriptor TEMP_FILENAME = new PropertyDescriptor.Builder()
             .name("Temporary Filename")
-            .description("If set, the filename of the sent file will be equal to the value specified during the transfer and after successful completion will be renamed to the original filename. If this value is set, the Dot Rename property is ignored.")
+            .description("If set, the filename of the sent file will be equal to the value specified during the transfer and after successful "
+                    + "completion will be renamed to the original filename. If this value is set, the Dot Rename property is ignored.")
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .expressionLanguageSupported(true)
             .required(false)
             .build();
     public static final PropertyDescriptor LAST_MODIFIED_TIME = new PropertyDescriptor.Builder()
             .name("Last Modified Time")
-            .description("The lastModifiedTime to assign to the file after transferring it. If not set, the lastModifiedTime will not be changed. Format must be yyyy-MM-dd'T'HH:mm:ssZ. You may also use expression language such as ${file.lastModifiedTime}. If the value is invalid, the processor will not be invalid but will fail to change lastModifiedTime of the file.")
+            .description("The lastModifiedTime to assign to the file after transferring it. If not set, the lastModifiedTime will not be changed. "
+                    + "Format must be yyyy-MM-dd'T'HH:mm:ssZ. You may also use expression language such as ${file.lastModifiedTime}. If the value "
+                    + "is invalid, the processor will not be invalid but will fail to change lastModifiedTime of the file.")
             .required(false)
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .expressionLanguageSupported(true)
             .build();
     public static final PropertyDescriptor PERMISSIONS = new PropertyDescriptor.Builder()
             .name("Permissions")
-            .description("The permissions to assign to the file after transferring it. Format must be either UNIX rwxrwxrwx with a - in place of denied permissions (e.g. rw-r--r--) or an octal number (e.g. 644). If not set, the permissions will not be changed. You may also use expression language such as ${file.permissions}. If the value is invalid, the processor will not be invalid but will fail to change permissions of the file.")
+            .description("The permissions to assign to the file after transferring it. Format must be either UNIX rwxrwxrwx with a - in place of "
+                    + "denied permissions (e.g. rw-r--r--) or an octal number (e.g. 644). If not set, the permissions will not be changed. You may "
+                    + "also use expression language such as ${file.permissions}. If the value is invalid, the processor will not be invalid but will "
+                    + "fail to change permissions of the file.")
             .required(false)
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .expressionLanguageSupported(true)
             .build();
     public static final PropertyDescriptor REMOTE_OWNER = new PropertyDescriptor.Builder()
             .name("Remote Owner")
-            .description("Integer value representing the User ID to set on the file after transferring it. If not set, the owner will not be set. You may also use expression language such as ${file.owner}. If the value is invalid, the processor will not be invalid but will fail to change the owner of the file.")
+            .description("Integer value representing the User ID to set on the file after transferring it. If not set, the owner will not be set. "
+                    + "You may also use expression language such as ${file.owner}. If the value is invalid, the processor will not be invalid but "
+                    + "will fail to change the owner of the file.")
             .required(false)
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .expressionLanguageSupported(true)
             .build();
     public static final PropertyDescriptor REMOTE_GROUP = new PropertyDescriptor.Builder()
             .name("Remote Group")
-            .description("Integer value representing the Group ID to set on the file after transferring it. If not set, the group will not be set. You may also use expression language such as ${file.group}. If the value is invalid, the processor will not be invalid but will fail to change the group of the file.")
+            .description("Integer value representing the Group ID to set on the file after transferring it. If not set, the group will not be set. "
+                    + "You may also use expression language such as ${file.group}. If the value is invalid, the processor will not be invalid but "
+                    + "will fail to change the group of the file.")
             .required(false)
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .expressionLanguageSupported(true)

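For readers skimming the reflowed hunks above, here is a minimal, self-contained sketch of the PropertyDescriptor.Builder pattern they use; the "Remote Path" property and its description are hypothetical, for illustration only, and assume the NiFi 0.x API targeted by this branch.

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.processor.util.StandardValidators;

    public class ExampleProperties {
        // Hypothetical property, shown only to illustrate the builder chain reformatted above.
        public static final PropertyDescriptor REMOTE_PATH = new PropertyDescriptor.Builder()
                .name("Remote Path")
                .description("The directory on the remote system to which files should be written. "
                        + "You may also use expression language such as ${path}.")
                .required(false)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .expressionLanguageSupported(true)
                .build();
    }
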
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
index 5034b83..19955e7 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
@@ -149,8 +149,7 @@ public class SFTPTransfer implements FileTransfer {
             return;
         }
 
-        final boolean ignoreDottedFiles = ctx.
-                getProperty(FileTransfer.IGNORE_DOTTED_FILES).asBoolean();
+        final boolean ignoreDottedFiles = ctx.getProperty(FileTransfer.IGNORE_DOTTED_FILES).asBoolean();
         final boolean recurse = ctx.getProperty(FileTransfer.RECURSIVE_SEARCH).asBoolean();
         final String fileFilterRegex = ctx.getProperty(FileTransfer.FILE_FILTER_REGEX).getValue();
         final Pattern pattern = (fileFilterRegex == null) ? null : Pattern.compile(fileFilterRegex);
@@ -234,8 +233,7 @@ public class SFTPTransfer implements FileTransfer {
             try {
                 getListing(newFullForwardPath, depth + 1, maxResults, listing);
             } catch (final IOException e) {
-                logger.
-                        error("Unable to get listing from " + newFullForwardPath + "; skipping this subdirectory");
+                logger.error("Unable to get listing from " + newFullForwardPath + "; skipping this subdirectory");
             }
         }
     }
@@ -310,8 +308,7 @@ public class SFTPTransfer implements FileTransfer {
                 channel.mkdir(remoteDirectory);
             } catch (SftpException e) {
                 if (e.id != ChannelSftp.SSH_FX_FAILURE) {
-                    throw new IOException("Could not blindly create remote directory due to " + e.
-                            getMessage(), e);
+                    throw new IOException("Could not blindly create remote directory due to " + e.getMessage(), e);
                 }
             }
             return;
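
The hunk above only joins a wrapped exception message, but the surrounding idiom is worth a standalone sketch: the directory is created blindly, the generic SSH_FX_FAILURE code (what most servers report when the directory already exists) is swallowed, and anything else is rethrown. This assumes the JSch API that SFTPTransfer uses.

    import java.io.IOException;
    import com.jcraft.jsch.ChannelSftp;
    import com.jcraft.jsch.SftpException;

    final class RemoteDirectories {
        // Attempt the mkdir unconditionally; ignore SSH_FX_FAILURE, rethrow everything else.
        static void createBlindly(final ChannelSftp channel, final String remoteDirectory) throws IOException {
            try {
                channel.mkdir(remoteDirectory);
            } catch (final SftpException e) {
                if (e.id != ChannelSftp.SSH_FX_FAILURE) {
                    throw new IOException("Could not blindly create remote directory due to " + e.getMessage(), e);
                }
            }
        }
    }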

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/XmlSplitterSaxParser.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/XmlSplitterSaxParser.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/XmlSplitterSaxParser.java
index d053f14..d012ae0 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/XmlSplitterSaxParser.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/XmlSplitterSaxParser.java
@@ -71,9 +71,7 @@ public class XmlSplitterSaxParser extends DefaultHandler {
     @Override
     public void endElement(final String uri, final String localName, final String qName) throws SAXException {
         // Add the element end tag.
-        sb.append("</").
-                append(qName).
-                append(">");
+        sb.append("</").append(qName).append(">");
 
         // We have finished processing this element. Decrement the depth.
         int newDepth = depth.decrementAndGet();
@@ -104,12 +102,7 @@ public class XmlSplitterSaxParser extends DefaultHandler {
             for (int i = 0; i < attCount; i++) {
                 String attName = atts.getQName(i);
                 String attValue = atts.getValue(i);
-                sb.append(" ").
-                        append(attName).
-                        append("=").
-                        append("\"").
-                        append(attValue).
-                        append("\"");
+                sb.append(" ").append(attName).append("=").append("\"").append(attValue).append("\"");
             }
 
             sb.append(">");
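
The chained StringBuilder appends above rebuild element markup inside a SAX handler. A minimal standalone sketch of that technique follows; it is not the NiFi class itself and, like the original, it does not XML-escape attribute values.

    import org.xml.sax.Attributes;
    import org.xml.sax.helpers.DefaultHandler;

    class MarkupRebuildingHandler extends DefaultHandler {
        private final StringBuilder sb = new StringBuilder();

        @Override
        public void startElement(String uri, String localName, String qName, Attributes atts) {
            sb.append("<").append(qName);
            for (int i = 0; i < atts.getLength(); i++) {
                // Re-emit each attribute as name="value".
                sb.append(" ").append(atts.getQName(i)).append("=\"").append(atts.getValue(i)).append("\"");
            }
            sb.append(">");
        }

        @Override
        public void endElement(String uri, String localName, String qName) {
            // Close the element with its end tag.
            sb.append("</").append(qName).append(">");
        }

        String markup() {
            return sb.toString();
        }
    }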

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java
index ac2efec..1965314 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDistributeLoad.java
@@ -29,8 +29,7 @@ public class TestDistributeLoad {
     public static void before() {
         System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
         System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.DistributeLoad", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.DistributeLoad", "debug");
     }
 
     @Test

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java
index 6012b04..688b9eb 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestHandleHttpRequest.java
@@ -60,8 +60,8 @@ public class TestHandleHttpRequest {
                 public void run() {
                     try {
                         final int port = ((HandleHttpRequest) runner.getProcessor()).getPort();
-                        final HttpURLConnection connection = (HttpURLConnection) new URL("http://localhost:" + port + "/my/path?query=true&value1=value1&value2=&value3&value4=apple=orange").
-                                openConnection();
+                        final HttpURLConnection connection = (HttpURLConnection) new URL("http://localhost:"
+                                + port + "/my/path?query=true&value1=value1&value2=&value3&value4=apple=orange").openConnection();
                         connection.setDoOutput(false);
                         connection.setRequestMethod("GET");
                         connection.setRequestProperty("header1", "value1");


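The reflowed URL construction above issues a plain GET with a request header against the embedded test server. A minimal standalone sketch of that call, where the port and path are placeholders rather than the test's values:

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class SimpleGet {
        public static void main(String[] args) throws Exception {
            final int port = 8080; // placeholder; the test asks the running processor for its bound port
            final HttpURLConnection connection =
                    (HttpURLConnection) new URL("http://localhost:" + port + "/my/path?query=true").openConnection();
            connection.setDoOutput(false);
            connection.setRequestMethod("GET");
            connection.setRequestProperty("header1", "value1");
            System.out.println("HTTP status: " + connection.getResponseCode()); // sending happens here
        }
    }
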
[03/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMonitorActivity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMonitorActivity.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMonitorActivity.java
index 9e970f1..2e87441 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMonitorActivity.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMonitorActivity.java
@@ -31,8 +31,7 @@ public class TestMonitorActivity {
 
     @Test
     public void testFirstMessage() throws InterruptedException, IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new MonitorActivity());
+        final TestRunner runner = TestRunners.newTestRunner(new MonitorActivity());
         runner.setProperty(MonitorActivity.CONTINUALLY_SEND_MESSAGES, "false");
         runner.setProperty(MonitorActivity.THRESHOLD, "100 millis");
 
@@ -66,12 +65,9 @@ public class TestMonitorActivity {
         runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 1);
         runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 1);
 
-        MockFlowFile restoredFlowFile = runner.
-                getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).
-                get(0);
+        MockFlowFile restoredFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).get(0);
         String flowFileContent = new String(restoredFlowFile.toByteArray());
-        Assert.assertTrue(Pattern.
-                matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
+        Assert.assertTrue(Pattern.matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
         restoredFlowFile.assertAttributeNotExists("key");
         restoredFlowFile.assertAttributeNotExists("key1");
 
@@ -96,20 +92,16 @@ public class TestMonitorActivity {
         runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 1);
         runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 1);
 
-        restoredFlowFile = runner.
-                getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).
-                get(0);
+        restoredFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).get(0);
         flowFileContent = new String(restoredFlowFile.toByteArray());
-        Assert.assertTrue(Pattern.
-                matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
+        Assert.assertTrue(Pattern.matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
         restoredFlowFile.assertAttributeNotExists("key");
         restoredFlowFile.assertAttributeNotExists("key1");
     }
 
     @Test
     public void testFirstMessageWithInherit() throws InterruptedException, IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new MonitorActivity());
+        final TestRunner runner = TestRunners.newTestRunner(new MonitorActivity());
         runner.setProperty(MonitorActivity.CONTINUALLY_SEND_MESSAGES, "false");
         runner.setProperty(MonitorActivity.THRESHOLD, "100 millis");
         runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
@@ -117,9 +109,7 @@ public class TestMonitorActivity {
         runner.enqueue(new byte[0]);
         runner.run();
         runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS, 1);
-        MockFlowFile originalFlowFile = runner.
-                getFlowFilesForRelationship(MonitorActivity.REL_SUCCESS).
-                get(0);
+        MockFlowFile originalFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_SUCCESS).get(0);
         runner.clearTransferState();
 
         Thread.sleep(1000L);
@@ -147,33 +137,21 @@ public class TestMonitorActivity {
         runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 1);
         runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 1);
 
-        MockFlowFile restoredFlowFile = runner.
-                getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).
-                get(0);
+        MockFlowFile restoredFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).get(0);
         String flowFileContent = new String(restoredFlowFile.toByteArray());
-        Assert.assertTrue(Pattern.
-                matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
+        Assert.assertTrue(Pattern.matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
         restoredFlowFile.assertAttributeEquals("key", "value");
         restoredFlowFile.assertAttributeEquals("key1", "value1");
 
         // verify the UUIDs are not the same
-        restoredFlowFile.
-                assertAttributeNotEquals(CoreAttributes.UUID.key(), originalFlowFile.
-                        getAttribute(CoreAttributes.UUID.key()));
-        restoredFlowFile.
-                assertAttributeNotEquals(CoreAttributes.FILENAME.key(), originalFlowFile.
-                        getAttribute(CoreAttributes.FILENAME.key()));
+        restoredFlowFile.assertAttributeNotEquals(CoreAttributes.UUID.key(), originalFlowFile.getAttribute(CoreAttributes.UUID.key()));
+        restoredFlowFile.assertAttributeNotEquals(CoreAttributes.FILENAME.key(), originalFlowFile.getAttribute(CoreAttributes.FILENAME.key()));
         Assert.assertTrue(
-                String.
-                format("file sizes match when they shouldn't original=%1$s restored=%2$s",
-                        originalFlowFile.getSize(), restoredFlowFile.getSize()),
-                restoredFlowFile.getSize() != originalFlowFile.getSize());
-        Assert.assertTrue(String.
-                format("lineage start dates match when they shouldn't original=%1$s restored=%2$s",
-                        originalFlowFile.getLineageStartDate(), restoredFlowFile.
-                        getLineageStartDate()),
-                restoredFlowFile.getLineageStartDate() != originalFlowFile.
-                getLineageStartDate());
+                String.format("file sizes match when they shouldn't original=%1$s restored=%2$s",
+                        originalFlowFile.getSize(), restoredFlowFile.getSize()), restoredFlowFile.getSize() != originalFlowFile.getSize());
+        Assert.assertTrue(
+                String.format("lineage start dates match when they shouldn't original=%1$s restored=%2$s",
+                        originalFlowFile.getLineageStartDate(), restoredFlowFile.getLineageStartDate()), restoredFlowFile.getLineageStartDate() != originalFlowFile.getLineageStartDate());
 
         runner.clearTransferState();
         runner.setProperty(MonitorActivity.CONTINUALLY_SEND_MESSAGES, "true");
@@ -196,30 +174,18 @@ public class TestMonitorActivity {
         runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 1);
         runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 1);
 
-        restoredFlowFile = runner.
-                getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).
-                get(0);
+        restoredFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).get(0);
         flowFileContent = new String(restoredFlowFile.toByteArray());
-        Assert.assertTrue(Pattern.
-                matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
+        Assert.assertTrue(Pattern.matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
         restoredFlowFile.assertAttributeEquals("key", "value");
         restoredFlowFile.assertAttributeEquals("key1", "value1");
-        restoredFlowFile.
-                assertAttributeNotEquals(CoreAttributes.UUID.key(), originalFlowFile.
-                        getAttribute(CoreAttributes.UUID.key()));
-        restoredFlowFile.
-                assertAttributeNotEquals(CoreAttributes.FILENAME.key(), originalFlowFile.
-                        getAttribute(CoreAttributes.FILENAME.key()));
+        restoredFlowFile.assertAttributeNotEquals(CoreAttributes.UUID.key(), originalFlowFile.getAttribute(CoreAttributes.UUID.key()));
+        restoredFlowFile.assertAttributeNotEquals(CoreAttributes.FILENAME.key(), originalFlowFile.getAttribute(CoreAttributes.FILENAME.key()));
+        Assert.assertTrue(
+                String.format("file sizes match when they shouldn't original=%1$s restored=%2$s",
+                        originalFlowFile.getSize(), restoredFlowFile.getSize()), restoredFlowFile.getSize() != originalFlowFile.getSize());
         Assert.assertTrue(
-                String.
-                format("file sizes match when they shouldn't original=%1$s restored=%2$s",
-                        originalFlowFile.getSize(), restoredFlowFile.getSize()),
-                restoredFlowFile.getSize() != originalFlowFile.getSize());
-        Assert.assertTrue(String.
-                format("lineage start dates match when they shouldn't original=%1$s restored=%2$s",
-                        originalFlowFile.getLineageStartDate(), restoredFlowFile.
-                        getLineageStartDate()),
-                restoredFlowFile.getLineageStartDate() != originalFlowFile.
-                getLineageStartDate());
+                String.format("lineage start dates match when they shouldn't original=%1$s restored=%2$s",
+                        originalFlowFile.getLineageStartDate(), restoredFlowFile.getLineageStartDate()), restoredFlowFile.getLineageStartDate() != originalFlowFile.getLineageStartDate());
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPostHTTP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPostHTTP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPostHTTP.java
index f4c4367..bd35868 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPostHTTP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPostHTTP.java
@@ -65,21 +65,16 @@ public class TestPostHTTP {
     public void testTruststoreSSLOnly() throws Exception {
         final Map<String, String> sslProps = new HashMap<>();
         sslProps.put(TestServer.NEED_CLIENT_AUTH, "false");
-        sslProps.
-                put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
-        sslProps.
-                put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
+        sslProps.put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
+        sslProps.put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
         sslProps.put(StandardSSLContextService.KEYSTORE_TYPE.getName(), "JKS");
         setup(sslProps);
 
         final SSLContextService sslContextService = new StandardSSLContextService();
         runner.addControllerService("ssl-context", sslContextService);
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE, "src/test/resources/localhost-ts.jks");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_PASSWORD, "localtest");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_TYPE, "JKS");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE, "src/test/resources/localhost-ts.jks");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_PASSWORD, "localtest");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_TYPE, "JKS");
         runner.enableControllerService(sslContextService);
 
         runner.setProperty(PostHTTP.URL, server.getSecureUrl());
@@ -94,33 +89,23 @@ public class TestPostHTTP {
     @Test
     public void testTwoWaySSL() throws Exception {
         final Map<String, String> sslProps = new HashMap<>();
-        sslProps.
-                put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
-        sslProps.
-                put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
+        sslProps.put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
+        sslProps.put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
         sslProps.put(StandardSSLContextService.KEYSTORE_TYPE.getName(), "JKS");
-        sslProps.
-                put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
-        sslProps.
-                put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
+        sslProps.put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
+        sslProps.put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
         sslProps.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
         sslProps.put(TestServer.NEED_CLIENT_AUTH, "true");
         setup(sslProps);
 
         final SSLContextService sslContextService = new StandardSSLContextService();
         runner.addControllerService("ssl-context", sslContextService);
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE, "src/test/resources/localhost-ts.jks");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_PASSWORD, "localtest");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_TYPE, "JKS");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.KEYSTORE, "src/test/resources/localhost-ks.jks");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.KEYSTORE_PASSWORD, "localtest");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.KEYSTORE_TYPE, "JKS");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE, "src/test/resources/localhost-ts.jks");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_PASSWORD, "localtest");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_TYPE, "JKS");
+        runner.setProperty(sslContextService, StandardSSLContextService.KEYSTORE, "src/test/resources/localhost-ks.jks");
+        runner.setProperty(sslContextService, StandardSSLContextService.KEYSTORE_PASSWORD, "localtest");
+        runner.setProperty(sslContextService, StandardSSLContextService.KEYSTORE_TYPE, "JKS");
         runner.enableControllerService(sslContextService);
 
         runner.setProperty(PostHTTP.URL, server.getSecureUrl());
@@ -135,27 +120,20 @@ public class TestPostHTTP {
     @Test
     public void testOneWaySSLWhenServerConfiguredForTwoWay() throws Exception {
         final Map<String, String> sslProps = new HashMap<>();
-        sslProps.
-                put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
-        sslProps.
-                put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
+        sslProps.put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
+        sslProps.put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
         sslProps.put(StandardSSLContextService.KEYSTORE_TYPE.getName(), "JKS");
-        sslProps.
-                put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
-        sslProps.
-                put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
+        sslProps.put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
+        sslProps.put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
         sslProps.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
         sslProps.put(TestServer.NEED_CLIENT_AUTH, "true");
         setup(sslProps);
 
         final SSLContextService sslContextService = new StandardSSLContextService();
         runner.addControllerService("ssl-context", sslContextService);
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE, "src/test/resources/localhost-ts.jks");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_PASSWORD, "localtest");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_TYPE, "JKS");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE, "src/test/resources/localhost-ts.jks");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_PASSWORD, "localtest");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_TYPE, "JKS");
         runner.enableControllerService(sslContextService);
 
         runner.setProperty(PostHTTP.URL, server.getSecureUrl());
@@ -191,8 +169,7 @@ public class TestPostHTTP {
         FlowFileUnpackagerV3 unpacker = new FlowFileUnpackagerV3();
 
         // unpack first flowfile received
-        Map<String, String> receivedAttrs = unpacker.
-                unpackageFlowFile(bais, baos);
+        Map<String, String> receivedAttrs = unpacker.unpackageFlowFile(bais, baos);
         byte[] contentReceived = baos.toByteArray();
         assertEquals("Hello", new String(contentReceived));
         assertEquals("cba", receivedAttrs.get("abc"));
@@ -211,33 +188,23 @@ public class TestPostHTTP {
     @Test
     public void testSendAsFlowFileSecure() throws Exception {
         final Map<String, String> sslProps = new HashMap<>();
-        sslProps.
-                put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
-        sslProps.
-                put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
+        sslProps.put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
+        sslProps.put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
         sslProps.put(StandardSSLContextService.KEYSTORE_TYPE.getName(), "JKS");
-        sslProps.
-                put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
-        sslProps.
-                put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
+        sslProps.put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
+        sslProps.put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
         sslProps.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
         sslProps.put(TestServer.NEED_CLIENT_AUTH, "true");
         setup(sslProps);
 
         final SSLContextService sslContextService = new StandardSSLContextService();
         runner.addControllerService("ssl-context", sslContextService);
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE, "src/test/resources/localhost-ts.jks");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_PASSWORD, "localtest");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_TYPE, "JKS");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.KEYSTORE, "src/test/resources/localhost-ks.jks");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.KEYSTORE_PASSWORD, "localtest");
-        runner.
-                setProperty(sslContextService, StandardSSLContextService.KEYSTORE_TYPE, "JKS");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE, "src/test/resources/localhost-ts.jks");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_PASSWORD, "localtest");
+        runner.setProperty(sslContextService, StandardSSLContextService.TRUSTSTORE_TYPE, "JKS");
+        runner.setProperty(sslContextService, StandardSSLContextService.KEYSTORE, "src/test/resources/localhost-ks.jks");
+        runner.setProperty(sslContextService, StandardSSLContextService.KEYSTORE_PASSWORD, "localtest");
+        runner.setProperty(sslContextService, StandardSSLContextService.KEYSTORE_TYPE, "JKS");
         runner.enableControllerService(sslContextService);
 
         runner.setProperty(PostHTTP.URL, server.getSecureUrl());
@@ -262,8 +229,7 @@ public class TestPostHTTP {
         FlowFileUnpackagerV3 unpacker = new FlowFileUnpackagerV3();
 
         // unpack first flowfile received
-        Map<String, String> receivedAttrs = unpacker.
-                unpackageFlowFile(bais, baos);
+        Map<String, String> receivedAttrs = unpacker.unpackageFlowFile(bais, baos);
         byte[] contentReceived = baos.toByteArray();
         assertEquals("Hello", new String(contentReceived));
         assertEquals("cba", receivedAttrs.get("abc"));
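
The tests above receive each POSTed FlowFile as a version-3 package and unpack it back into attributes plus content. A minimal sketch of that unpackaging step, assuming the FlowFileUnpackagerV3 API used in the hunks:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.util.Map;
    import org.apache.nifi.util.FlowFileUnpackagerV3;

    public class UnpackExample {
        // Given bytes in FlowFile v3 package format, recover the attributes and the raw content.
        public static void unpack(final byte[] packagedBytes) throws Exception {
            final ByteArrayInputStream bais = new ByteArrayInputStream(packagedBytes);
            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
            final FlowFileUnpackagerV3 unpacker = new FlowFileUnpackagerV3();
            final Map<String, String> receivedAttrs = unpacker.unpackageFlowFile(bais, baos);
            final byte[] contentReceived = baos.toByteArray();
            System.out.println(receivedAttrs + " / " + contentReceived.length + " bytes");
        }
    }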

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutEmail.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutEmail.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutEmail.java
index 313790e..af04cbc 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutEmail.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutEmail.java
@@ -53,28 +53,21 @@ public class TestPutEmail {
         final TestRunner runner = TestRunners.newTestRunner(new PutEmail());
         runner.setProperty(PutEmail.HEADER_XMAILER, "TestingNiFi");
         runner.setProperty(PutEmail.SMTP_HOSTNAME, "smtp-host");
-        runner.
-                setProperty(PutEmail.SMTP_SOCKET_FACTORY, "${dynamicSocketFactory}");
+        runner.setProperty(PutEmail.SMTP_SOCKET_FACTORY, "${dynamicSocketFactory}");
         runner.setProperty(PutEmail.HEADER_XMAILER, "TestingNiFi");
         runner.setProperty(PutEmail.FROM, "test@apache.org");
         runner.setProperty(PutEmail.MESSAGE, "Message Body");
         runner.setProperty(PutEmail.TO, "recipient@apache.org");
 
-        ProcessSession session = runner.getProcessSessionFactory().
-                createSession();
+        ProcessSession session = runner.getProcessSessionFactory().createSession();
         FlowFile ff = session.create();
-        ff = session.
-                putAttribute(ff, "dynamicSocketFactory", "testingSocketFactory");
+        ff = session.putAttribute(ff, "dynamicSocketFactory", "testingSocketFactory");
         ProcessContext context = runner.getProcessContext();
 
-        String xmailer = context.getProperty(PutEmail.HEADER_XMAILER).
-                evaluateAttributeExpressions(ff).
-                getValue();
+        String xmailer = context.getProperty(PutEmail.HEADER_XMAILER).evaluateAttributeExpressions(ff).getValue();
         assertEquals("X-Mailer Header", "TestingNiFi", xmailer);
 
-        String socketFactory = context.getProperty(PutEmail.SMTP_SOCKET_FACTORY).
-                evaluateAttributeExpressions(ff).
-                getValue();
+        String socketFactory = context.getProperty(PutEmail.SMTP_SOCKET_FACTORY).evaluateAttributeExpressions(ff).getValue();
         assertEquals("Socket Factory", "testingSocketFactory", socketFactory);
 
         final Map<String, String> attributes = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceText.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceText.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceText.java
index 4722a84..e340468 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceText.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceText.java
@@ -42,9 +42,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Hlleo, World!".getBytes("UTF-8"));
     }
 
@@ -59,9 +57,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("H[ell]o, World!");
     }
 
@@ -78,9 +74,7 @@ public class TestReplaceText {
         runner.enqueue(Paths.get("src/test/resources/hello.txt"), attributes);
         runner.run();
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         final String actual = new String(out.toByteArray(), StandardCharsets.UTF_8);
         System.out.println(actual);
         Assert.assertEquals(expected, actual);
@@ -98,9 +92,7 @@ public class TestReplaceText {
         runner.enqueue(Paths.get("src/test/resources/hello.txt"), attributes);
         runner.run();
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         final String actual = new String(out.toByteArray(), StandardCharsets.UTF_8);
         Assert.assertEquals("Hell123o, World!", actual);
     }
@@ -119,9 +111,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Hello, World!");
     }
 
@@ -139,9 +129,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("H[$1]o, World!");
     }
 
@@ -160,9 +148,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Good-bye, World!");
     }
 
@@ -181,9 +167,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Hello, World!");
     }
 
@@ -202,9 +186,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Hell$2o, World!");
     }
 
@@ -223,9 +205,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("H$do, World!");
     }
 
@@ -244,9 +224,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("H$1o, World!");
     }
 
@@ -261,9 +239,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Ho, World!");
     }
 
@@ -278,9 +254,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Hello, World!");
     }
 
@@ -295,9 +269,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("HeRRo, WorRd!");
     }
 
@@ -315,9 +287,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Good");
     }
 
@@ -353,9 +323,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("Good");
     }
 
@@ -364,8 +332,7 @@ public class TestReplaceText {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
         runner.setProperty(ReplaceText.REGEX, ".*");
-        runner.
-                setProperty(ReplaceText.REPLACEMENT_VALUE, "${filename}\t${now():format(\"yyyy/MM/dd'T'HHmmss'Z'\")}\t${fileSize}\n");
+        runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "${filename}\t${now():format(\"yyyy/MM/dd'T'HHmmss'Z'\")}\t${fileSize}\n");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("filename", "abc.txt");
@@ -374,9 +341,7 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         final String outContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
         Assert.assertTrue(outContent.startsWith("abc.txt\t"));
         System.out.println(outContent);
@@ -388,8 +353,7 @@ public class TestReplaceText {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
         runner.setProperty(ReplaceText.REGEX, "(?s)(^.*)");
-        runner.
-                setProperty(ReplaceText.REPLACEMENT_VALUE, "attribute header\n\n${filename}\n\ndata header\n\n$1\n\nfooter");
+        runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "attribute header\n\n${filename}\n\ndata header\n\n$1\n\nfooter");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("filename", "abc.txt");
@@ -398,12 +362,9 @@ public class TestReplaceText {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         final String outContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
-        Assert.assertTrue(outContent.
-                equals("attribute header\n\nabc.txt\n\ndata header\n\nHello\nWorld!\n\nfooter"));
+        Assert.assertTrue(outContent.equals("attribute header\n\nabc.txt\n\ndata header\n\nHello\nWorld!\n\nfooter"));
         System.out.println(outContent);
     }
 

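Several expectations above ("H$do, World!", "H$1o, World!", "Hell$2o, World!") hinge on ReplaceText escaping replacement strings that java.util.regex would otherwise reject as back-references. A minimal JDK-only sketch of the difference, using "Hello, World!" as a stand-in for the hello.txt fixture:

    import java.util.regex.Matcher;

    public class ReplacementEscaping {
        public static void main(String[] args) {
            final String input = "Hello, World!";
            // "$d" is not a valid group reference, so a raw input.replaceAll("(ell)", "$d") would throw.
            // Quoting the replacement first keeps it literal, which is what the tests above expect.
            final String escaped = Matcher.quoteReplacement("$d");
            System.out.println(input.replaceAll("(ell)", escaped)); // prints H$do, World!
        }
    }
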
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextLineByLine.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextLineByLine.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextLineByLine.java
index 9c19369..005c05a 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextLineByLine.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextLineByLine.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.ReplaceText;
 import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -40,300 +39,236 @@ public class TestReplaceTextLineByLine {
     public void testSimple() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "odo");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "ood");
 
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/food.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/food.txt")));
     }
 
     @Test
     public void testBackReference() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "(DODO)");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "[$1]");
 
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/[DODO].txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/[DODO].txt")));
     }
 
     @Test
     public void testReplacementWithExpressionLanguageIsEscaped() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "(jo)");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "[${abc}]");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("abc", "$1");
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/cu[$1]_Po[$1].txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/cu[$1]_Po[$1].txt")));
     }
 
     @Test
     public void testRegexWithExpressionLanguage() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "${replaceKey}");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "${replaceValue}");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("replaceKey", "Riley");
         attributes.put("replaceValue", "Spider");
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/Spider.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/Spider.txt")));
     }
 
     @Test
     public void testRegexWithExpressionLanguageIsEscaped() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "${replaceKey}");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "${replaceValue}");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("replaceKey", "R.*y");
         attributes.put("replaceValue", "Spider");
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
     }
 
     @Test
     public void testBackReferenceWithTooLargeOfIndexIsEscaped() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "(lu)");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "$1$2");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("replaceKey", "R.*y");
         attributes.put("replaceValue", "Spiderman");
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/Blu$2e_clu$2e.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/Blu$2e_clu$2e.txt")));
     }
 
     @Test
     public void testBackReferenceWithInvalidReferenceIsEscaped() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "(ew)");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "$d");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("replaceKey", "H.*o");
         attributes.put("replaceValue", "Good-bye");
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/D$d_h$d.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/D$d_h$d.txt")));
     }
 
     @Test
     public void testEscapingDollarSign() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "(DO)");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "\\$1");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("replaceKey", "H.*o");
         attributes.put("replaceValue", "Good-bye");
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/$1$1.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/$1$1.txt")));
     }
 
     @Test
     public void testReplaceWithEmptyString() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "(jo)");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "");
 
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/cu_Po.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/cu_Po.txt")));
     }
 
     @Test
     public void testWithNoMatch() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "Z");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "Morning");
 
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
     }
 
     @Test
     public void testWithMultipleMatches() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "l");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "R");
 
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/BRue_cRue_RiRey.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/BRue_cRue_RiRey.txt")));
     }
 
     @Test
     public void testAttributeToContent() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, ".*");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "${abc}");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("abc", "Good");
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
-        out.
-                assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/Good.txt")));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
+        out.assertContentEquals(translateNewLines(new File("src/test/resources/TestReplaceTextLineByLine/Good.txt")));
     }
 
     @Test
     public void testAttributeToContentWindows() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, ".*");
         runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "${abc}");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("abc", "Good");
-        runner.
-                enqueue("<<<HEADER>>>\r\n<<BODY>>\r\n<<<FOOTER>>>\r".getBytes(), attributes);
+        runner.enqueue("<<<HEADER>>>\r\n<<BODY>>\r\n<<<FOOTER>>>\r".getBytes(), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         out.assertContentEquals("GoodGoodGood");
     }
 
@@ -341,39 +276,31 @@ public class TestReplaceTextLineByLine {
     public void testProblematicCase1() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, ".*");
-        runner.
-                setProperty(ReplaceText.REPLACEMENT_VALUE, "${filename}\t${now():format(\"yyyy/MM/dd'T'HHmmss'Z'\")}\t${fileSize}\n");
+        runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "${filename}\t${now():format(\"yyyy/MM/dd'T'HHmmss'Z'\")}\t${fileSize}\n");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("filename", "abc.txt");
-        runner.enqueue(translateNewLines(Paths.
-                get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
+        runner.enqueue(translateNewLines(Paths.get("src/test/resources/TestReplaceTextLineByLine/testFile.txt")), attributes);
 
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         final String outContent = translateNewLines(new String(out.toByteArray(), StandardCharsets.UTF_8));
         Assert.assertTrue(outContent.startsWith("abc.txt\t"));
         System.out.println(outContent);
-        Assert.assertTrue(outContent.endsWith("193\n") || outContent.
-                endsWith("203\r\n"));
+        Assert.assertTrue(outContent.endsWith("193\n") || outContent.endsWith("203\r\n"));
     }
 
     @Test
     public void testGetExistingContent() throws IOException {
         final TestRunner runner = TestRunners.newTestRunner(new ReplaceText());
         runner.setValidateExpressionUsage(false);
-        runner.
-                setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
+        runner.setProperty(ReplaceText.EVALUATION_MODE, ReplaceText.LINE_BY_LINE);
         runner.setProperty(ReplaceText.REGEX, "(?s)(^.*)");
-        runner.
-                setProperty(ReplaceText.REPLACEMENT_VALUE, "attribute header\n\n${filename}\n\ndata header\n\n$1\n\nfooter\n");
+        runner.setProperty(ReplaceText.REPLACEMENT_VALUE, "attribute header\n\n${filename}\n\ndata header\n\n$1\n\nfooter\n");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("filename", "abc.txt");
@@ -382,14 +309,11 @@ public class TestReplaceTextLineByLine {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ReplaceText.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).
-                get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceText.REL_SUCCESS).get(0);
         final String outContent = new String(out.toByteArray(), StandardCharsets.UTF_8);
         System.out.println(outContent);
-        Assert.assertTrue(outContent.
-                equals("attribute header\n\nabc.txt\n\ndata header\n\nHello\n\n\nfooter\n"
-                        + "attribute header\n\nabc.txt\n\ndata header\n\nWorld!\n\nfooter\n"));
+        Assert.assertTrue(outContent.equals("attribute header\n\nabc.txt\n\ndata header\n\nHello\n\n\nfooter\n"
+                + "attribute header\n\nabc.txt\n\ndata header\n\nWorld!\n\nfooter\n"));
 
     }
 
@@ -400,15 +324,13 @@ public class TestReplaceTextLineByLine {
     private byte[] translateNewLines(final Path path) throws IOException {
         final byte[] data = Files.readAllBytes(path);
         final String text = new String(data, StandardCharsets.UTF_8);
-        return translateNewLines(text).
-                getBytes(StandardCharsets.UTF_8);
+        return translateNewLines(text).getBytes(StandardCharsets.UTF_8);
     }
 
     private String translateNewLines(final String text) {
         final String lineSeparator = System.getProperty("line.separator");
         final Pattern pattern = Pattern.compile("\n", Pattern.MULTILINE);
-        final String translated = pattern.matcher(text).
-                replaceAll(lineSeparator);
+        final String translated = pattern.matcher(text).replaceAll(lineSeparator);
         return translated;
     }
 }
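
The back-reference tests above depend on how java.util.regex treats '$' and '\' in replacement strings. The standalone sketch below is not part of the NiFi sources; it only demonstrates the library behavior the tests guard against: a reference to a group that does not exist fails at replacement time, and Matcher.quoteReplacement() is the stock way to insert such text literally. Per the expected output files, the processor's own escaping appears to be more surgical, escaping only the invalid reference while still expanding valid ones.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ReplacementEscapingDemo {
        public static void main(String[] args) {
            final String text = "Blue clue";
            final Pattern pattern = Pattern.compile("(lu)");

            // "$1" refers to the captured group, so this replacement works as expected.
            System.out.println(pattern.matcher(text).replaceAll("$1$1"));       // Blulue clulue

            // "$2" has no corresponding group; the regex engine rejects it when replacing.
            try {
                pattern.matcher(text).replaceAll("$1$2");
            } catch (IndexOutOfBoundsException e) {
                System.out.println("rejected: " + e.getMessage());              // No group 2
            }

            // quoteReplacement() escapes '$' and '\' so the whole string is inserted literally.
            System.out.println(pattern.matcher(text).replaceAll(Matcher.quoteReplacement("$1$2")));
        }
    }
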


[36/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java
index 573c407..f7b2009 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserGroupResource.java
@@ -54,7 +54,7 @@ public class UserGroupResource extends ApplicationResource {
 
     /*
      * Developer Note: Clustering assumes a centralized security provider. The
-     * cluster manager will manage user accounts when in clustered mode and 
+     * cluster manager will manage user accounts when in clustered mode and
      * interface with the authorization provider. However, when nodes perform
      * Site-to-Site, the authorization details of the remote NiFi will be cached
      * locally. These details need to be invalidated when certain actions are
@@ -68,18 +68,14 @@ public class UserGroupResource extends ApplicationResource {
     /**
      * Updates a new user group.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param userIds A collection of user ids to include in this group. If a
-     * user already belongs to another group, they will be placed in this group
-     * instead. Existing users in this group will remain in this group.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param userIds A collection of user ids to include in this group. If a user already belongs to another group, they will be placed in this group instead. Existing users in this group will remain
+     * in this group.
      * @param group The name of the group.
-     * @param rawAuthorities Array of authorities to assign to the specified
-     * user.
+     * @param rawAuthorities Array of authorities to assign to the specified user.
      * @param status The status of the specified users account.
-     * @param formParams
+     * @param formParams form params
      * @return A userGroupEntity.
      */
     @PUT
@@ -132,7 +128,7 @@ public class UserGroupResource extends ApplicationResource {
     /**
      * Creates a new user group with the specified users.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param group The user group.
      * @param userGroupEntity A userGroupEntity.
      * @return A userGroupEntity.
@@ -177,7 +173,7 @@ public class UserGroupResource extends ApplicationResource {
         // this user is being modified, replicate to the nodes to invalidate this account
         // so that it will be re-authorized during the next attempted access - if this wasn't
         // done the account would remain stale for up to the configured cache duration. this
-        // is acceptable sometimes but when updating a users authorities or groups via the UI 
+        // is acceptable sometimes but when updating a user's authorities or groups via the UI
         // they shouldn't have to wait for the changes to take effect`
         if (properties.isClusterManager()) {
             // change content type to JSON for serializing entity
@@ -233,15 +229,12 @@ public class UserGroupResource extends ApplicationResource {
     }
 
     /**
-     * Deletes the user from the specified group. The user will not be removed,
-     * just the fact that they were in this group.
+     * Deletes the user from the specified group. The user will not be removed, just the fact that they were in this group.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param group The user group.
      * @param userId The user id to remove.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A userGroupEntity.
      */
     @DELETE
@@ -258,7 +251,7 @@ public class UserGroupResource extends ApplicationResource {
         // this user is being modified, replicate to the nodes to invalidate this account
         // so that it will be re-authorized during the next attempted access - if this wasn't
         // done the account would remain stale for up to the configured cache duration. this
-        // is acceptable sometimes but when removing a user via the UI they shouldn't have to 
+        // is acceptable sometimes but when removing a user via the UI they shouldn't have to
         // wait for the changes to take effect
         if (properties.isClusterManager()) {
             // identify yourself as the NCM attempting to invalidate the user
@@ -302,14 +295,11 @@ public class UserGroupResource extends ApplicationResource {
     }
 
     /**
-     * Deletes the user group. The users will not be removed, just the fact that
-     * they were grouped.
+     * Deletes the user group. The users will not be removed, just the fact that they were grouped.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param group The user group.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A userGroupEntity.
      */
     @DELETE
@@ -325,7 +315,7 @@ public class UserGroupResource extends ApplicationResource {
         // this user is being modified, replicate to the nodes to invalidate this account
         // so that it will be re-authorized during the next attempted access - if this wasn't
         // done the account would remain stale for up to the configured cache duration. this
-        // is acceptable sometimes but when removing a user via the UI they shouldn't have to 
+        // is acceptable sometimes but when removing a user via the UI they shouldn't have to
         // wait for the changes to take effect
         if (properties.isClusterManager()) {
             // identify yourself as the NCM attempting to invalidate the user

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java
index 09307c8..6dbb1a7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/UserResource.java
@@ -66,7 +66,7 @@ public class UserResource extends ApplicationResource {
 
     /*
      * Developer Note: Clustering assumes a centralized security provider. The
-     * cluster manager will manage user accounts when in clustered mode and 
+     * cluster manager will manage user accounts when in clustered mode and
      * interface with the authorization provider. However, when nodes perform
      * Site-to-Site, the authorization details of the remote NiFi will be cached
      * locally. These details need to be invalidated when certain actions are
@@ -80,9 +80,7 @@ public class UserResource extends ApplicationResource {
     /**
      * Gets all users that are registered within this Controller.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param grouped Whether to return the users in their groups.
      * @return A usersEntity.
      */
@@ -114,9 +112,7 @@ public class UserResource extends ApplicationResource {
     /**
      * Gets the details for the specified user.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The user id.
      * @return A userEntity.
      */
@@ -237,15 +233,12 @@ public class UserResource extends ApplicationResource {
     /**
      * Updates the specified user.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the user to update.
-     * @param rawAuthorities Array of authorities to assign to the specified
-     * user.
+     * @param rawAuthorities Array of authorities to assign to the specified user.
      * @param status The status of the specified users account.
-     * @param formParams
+     * @param formParams form params
      * @return A userEntity
      */
     @PUT
@@ -296,7 +289,7 @@ public class UserResource extends ApplicationResource {
     /**
      * Updates the specified user.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the user to update.
      * @param userEntity A userEntity
      * @return A userEntity
@@ -334,7 +327,7 @@ public class UserResource extends ApplicationResource {
         // this user is being modified, replicate to the nodes to invalidate this account
         // so that it will be re-authorized during the next attempted access - if this wasn't
         // done the account would remain stale for up to the configured cache duration. this
-        // is acceptable sometimes but when updating a users authorities or groups via the UI 
+        // is acceptable sometimes but when updating a user's authorities or groups via the UI
         // they shouldn't have to wait for the changes to take effect`
         if (properties.isClusterManager()) {
             // change content type to JSON for serializing entity
@@ -391,11 +384,9 @@ public class UserResource extends ApplicationResource {
     /**
      * Deletes the specified user.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The user id
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A userEntity.
      */
     @DELETE
@@ -411,7 +402,7 @@ public class UserResource extends ApplicationResource {
         // this user is being modified, replicate to the nodes to invalidate this account
         // so that it will be re-authorized during the next attempted access - if this wasn't
         // done the account would remain stale for up to the configured cache duration. this
-        // is acceptable sometimes but when removing a user via the UI they shouldn't have to 
+        // is acceptable sometimes but when removing a user via the UI they shouldn't have to
         // wait for the changes to take effect
         if (properties.isClusterManager()) {
             // identify yourself as the NCM attempting to invalidate the user

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AdministrationExceptionMapper.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AdministrationExceptionMapper.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AdministrationExceptionMapper.java
index d631d82..19b8e19 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AdministrationExceptionMapper.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AdministrationExceptionMapper.java
@@ -20,7 +20,6 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.ext.ExceptionMapper;
 import javax.ws.rs.ext.Provider;
 import org.apache.nifi.admin.service.AdministrationException;
-import org.apache.nifi.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AuthenticationCredentialsNotFoundExceptionMapper.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AuthenticationCredentialsNotFoundExceptionMapper.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AuthenticationCredentialsNotFoundExceptionMapper.java
index 72a9c7b..bd35a98 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AuthenticationCredentialsNotFoundExceptionMapper.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/AuthenticationCredentialsNotFoundExceptionMapper.java
@@ -25,8 +25,7 @@ import org.slf4j.LoggerFactory;
 import org.springframework.security.authentication.AuthenticationCredentialsNotFoundException;
 
 /**
- * Maps exceptions that occur because no valid credentials were found into the
- * corresponding response.
+ * Maps exceptions that occur because no valid credentials were found into the corresponding response.
  */
 @Provider
 public class AuthenticationCredentialsNotFoundExceptionMapper implements ExceptionMapper<AuthenticationCredentialsNotFoundException> {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ClusterExceptionMapper.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ClusterExceptionMapper.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ClusterExceptionMapper.java
index a3c9884..2a67cf8 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ClusterExceptionMapper.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ClusterExceptionMapper.java
@@ -20,7 +20,6 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.ext.ExceptionMapper;
 import javax.ws.rs.ext.Provider;
 import org.apache.nifi.cluster.manager.exception.ClusterException;
-import org.apache.nifi.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/NoResponseFromNodesExceptionMapper.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/NoResponseFromNodesExceptionMapper.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/NoResponseFromNodesExceptionMapper.java
index ce2059c..ceb7360 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/NoResponseFromNodesExceptionMapper.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/NoResponseFromNodesExceptionMapper.java
@@ -26,8 +26,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Maps the exceptional case when the cluster is unable to service a request
- * because no nodes returned a response.
+ * Maps the exceptional case when the cluster is unable to service a request because no nodes returned a response.
  */
 @Provider
 public class NoResponseFromNodesExceptionMapper implements ExceptionMapper<NoResponseFromNodesException> {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ThrowableMapper.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ThrowableMapper.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ThrowableMapper.java
index 7277250..bdf1f00 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ThrowableMapper.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/config/ThrowableMapper.java
@@ -19,7 +19,6 @@ package org.apache.nifi.web.api.config;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.ext.ExceptionMapper;
 import javax.ws.rs.ext.Provider;
-import org.apache.nifi.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
index bf216c3..999a4a4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
@@ -129,9 +129,6 @@ import org.apache.nifi.controller.service.ControllerServiceReference;
 import org.apache.nifi.reporting.ReportingTask;
 import org.apache.nifi.web.FlowModification;
 
-/**
- *
- */
 public final class DtoFactory {
 
     @SuppressWarnings("rawtypes")
@@ -149,8 +146,8 @@ public final class DtoFactory {
     /**
      * Creates an ActionDTO for the specified Action.
      *
-     * @param action
-     * @return
+     * @param action action
+     * @return dto
      */
     public ActionDTO createActionDto(final Action action) {
         final ActionDTO actionDto = new ActionDTO();
@@ -171,8 +168,8 @@ public final class DtoFactory {
     /**
      * Creates an ActionDetailsDTO for the specified ActionDetails.
      *
-     * @param actionDetails
-     * @return
+     * @param actionDetails details
+     * @return dto
      */
     private ActionDetailsDTO createActionDetailsDto(final ActionDetails actionDetails) {
         if (actionDetails == null) {
@@ -214,8 +211,8 @@ public final class DtoFactory {
     /**
      * Creates a ComponentDetailsDTO for the specified ComponentDetails.
      *
-     * @param componentDetails
-     * @return
+     * @param componentDetails details
+     * @return dto
      */
     private ComponentDetailsDTO createComponentDetailsDto(final ComponentDetails componentDetails) {
         if (componentDetails == null) {
@@ -238,8 +235,8 @@ public final class DtoFactory {
     /**
      * Creates a HistoryDTO from the specified History.
      *
-     * @param history
-     * @return
+     * @param history history
+     * @return dto
      */
     public HistoryDTO createHistoryDto(final History history) {
         final HistoryDTO historyDto = new HistoryDTO();
@@ -260,8 +257,8 @@ public final class DtoFactory {
     /**
      * Creates CounterDTOs for each Counter specified.
      *
-     * @param counterDtos
-     * @return
+     * @param counterDtos dtos
+     * @return dto
      */
     public CountersDTO createCountersDto(final Collection<CounterDTO> counterDtos) {
         final CountersDTO dto = new CountersDTO();
@@ -273,8 +270,8 @@ public final class DtoFactory {
     /**
      * Creates a CounterDTO from the specified Counter.
      *
-     * @param counter
-     * @return
+     * @param counter counter
+     * @return dto
      */
     public CounterDTO createCounterDto(final Counter counter) {
         final CounterDTO dto = new CounterDTO();
@@ -289,8 +286,8 @@ public final class DtoFactory {
     /**
      * Creates a PositionDTO from the specified position
      *
-     * @param position
-     * @return
+     * @param position position
+     * @return dto
      */
     public PositionDTO createPositionDto(final Position position) {
         return new PositionDTO(position.getX(), position.getY());
@@ -299,8 +296,8 @@ public final class DtoFactory {
     /**
      * Creates a ConnectionDTO from the specified Connection.
      *
-     * @param connection
-     * @return
+     * @param connection connection
+     * @return dto
      */
     public ConnectionDTO createConnectionDto(final Connection connection) {
         if (connection == null) {
@@ -358,8 +355,8 @@ public final class DtoFactory {
     /**
      * Creates a ConnectableDTO from the specified Connectable.
      *
-     * @param connectable
-     * @return
+     * @param connectable connectable
+     * @return dto
      */
     public ConnectableDTO createConnectableDto(final Connectable connectable) {
         if (connectable == null) {
@@ -391,8 +388,8 @@ public final class DtoFactory {
     /**
      * Creates a LabelDTO from the specified Label.
      *
-     * @param label
-     * @return
+     * @param label label
+     * @return dto
      */
     public LabelDTO createLabelDto(final Label label) {
         if (label == null) {
@@ -414,8 +411,8 @@ public final class DtoFactory {
     /**
      * Creates a FunnelDTO from the specified Funnel.
      *
-     * @param funnel
-     * @return
+     * @param funnel funnel
+     * @return dto
      */
     public FunnelDTO createFunnelDto(final Funnel funnel) {
         if (funnel == null) {
@@ -433,8 +430,8 @@ public final class DtoFactory {
     /**
      * Creates a SnippetDTO from the specified Snippet.
      *
-     * @param snippet
-     * @return
+     * @param snippet snippet
+     * @return dto
      */
     public SnippetDTO createSnippetDto(final Snippet snippet) {
         final SnippetDTO dto = new SnippetDTO();
@@ -458,8 +455,8 @@ public final class DtoFactory {
     /**
      * Creates a TemplateDTO from the specified template.
      *
-     * @param template
-     * @return
+     * @param template template
+     * @return dto
      */
     public TemplateDTO createTemplateDTO(final Template template) {
         if (template == null) {
@@ -728,8 +725,8 @@ public final class DtoFactory {
     /**
      * Creates a PortStatusDTO for the specified PortStatus.
      *
-     * @param portStatus
-     * @return
+     * @param portStatus status
+     * @return dto
      */
     public PortStatusDTO createPortStatusDto(final PortStatus portStatus) {
         final PortStatusDTO dto = new PortStatusDTO();
@@ -754,8 +751,8 @@ public final class DtoFactory {
     /**
      * Copies the specified snippet.
      *
-     * @param originalSnippet
-     * @return
+     * @param originalSnippet snippet
+     * @return dto
      */
     public FlowSnippetDTO copySnippetContents(FlowSnippetDTO originalSnippet) {
         final FlowSnippetDTO copySnippet = new FlowSnippetDTO();
@@ -807,8 +804,8 @@ public final class DtoFactory {
     /**
      * Creates a PortDTO from the specified Port.
      *
-     * @param port
-     * @return
+     * @param port port
+     * @return dto
      */
     public PortDTO createPortDto(final Port port) {
         if (port == null) {
@@ -1109,8 +1106,8 @@ public final class DtoFactory {
     /**
      * Creates a RemoteProcessGroupDTO from the specified RemoteProcessGroup.
      *
-     * @param group
-     * @return
+     * @param group group
+     * @return dto
      */
     public RemoteProcessGroupDTO createRemoteProcessGroupDto(final RemoteProcessGroup group) {
         if (group == null) {
@@ -1191,8 +1188,8 @@ public final class DtoFactory {
     /**
      * Creates a ProcessGroupDTO from the specified parent ProcessGroup.
      *
-     * @param parentGroup
-     * @return
+     * @param parentGroup group
+     * @return dto
      */
     private ProcessGroupDTO createParentProcessGroupDto(final ProcessGroup parentGroup) {
         if (parentGroup == null) {
@@ -1213,8 +1210,8 @@ public final class DtoFactory {
     /**
      * Creates a ProcessGroupDTO from the specified ProcessGroup.
      *
-     * @param group
-     * @return
+     * @param group group
+     * @return dto
      */
     public ProcessGroupDTO createProcessGroupDto(final ProcessGroup group) {
         return createProcessGroupDto(group, false);
@@ -1223,9 +1220,9 @@ public final class DtoFactory {
     /**
      * Creates a ProcessGroupDTO from the specified ProcessGroup.
      *
-     * @param group
-     * @param recurse
-     * @return
+     * @param group group
+     * @param recurse recurse
+     * @return dto
      */
     public ProcessGroupDTO createProcessGroupDto(final ProcessGroup group, final boolean recurse) {
         final ProcessGroupDTO dto = createConciseProcessGroupDto(group);
@@ -1236,9 +1233,8 @@ public final class DtoFactory {
     /**
      * Creates a ProcessGroupDTO from the specified ProcessGroup.
      *
-     * @param group
-     * @param recurse
-     * @return
+     * @param group group
+     * @return dto
      */
     private ProcessGroupDTO createConciseProcessGroupDto(final ProcessGroup group) {
         if (group == null) {
@@ -1273,9 +1269,9 @@ public final class DtoFactory {
     /**
      * Creates a ProcessGroupContentDTO from the specified ProcessGroup.
      *
-     * @param group
-     * @param recurse
-     * @return
+     * @param group group
+     * @param recurse recurse
+     * @return dto
      */
     private FlowSnippetDTO createProcessGroupContentsDto(final ProcessGroup group, final boolean recurse) {
         if (group == null) {
@@ -1325,9 +1321,6 @@ public final class DtoFactory {
 
     /**
      * Gets the capability description from the specified class.
-     *
-     * @param cls
-     * @return
      */
     @SuppressWarnings("deprecation")
     private String getCapabilityDescription(final Class<?> cls) {
@@ -1344,9 +1337,6 @@ public final class DtoFactory {
 
     /**
      * Gets the tags from the specified class.
-     *
-     * @param cls
-     * @return
      */
     @SuppressWarnings("deprecation")
     private Set<String> getTags(final Class<?> cls) {
@@ -1371,8 +1361,8 @@ public final class DtoFactory {
     /**
      * Gets the DocumentedTypeDTOs from the specified classes.
      *
-     * @param classes
-     * @return
+     * @param classes classes
+     * @return dtos
      */
     @SuppressWarnings("rawtypes")
     public Set<DocumentedTypeDTO> fromDocumentedTypes(final Set<Class> classes) {
@@ -1394,8 +1384,8 @@ public final class DtoFactory {
     /**
      * Creates a ProcessorDTO from the specified ProcessorNode.
      *
-     * @param node
-     * @return
+     * @param node node
+     * @return dto
      */
     public ProcessorDTO createProcessorDto(final ProcessorNode node) {
         if (node == null) {
@@ -1454,8 +1444,8 @@ public final class DtoFactory {
     /**
      * Creates a BulletinBoardDTO for the specified bulletins.
      *
-     * @param bulletins
-     * @return
+     * @param bulletins bulletins
+     * @return dto
      */
     public BulletinBoardDTO createBulletinBoardDto(final List<BulletinDTO> bulletins) {
         // sort the bulletins
@@ -1494,8 +1484,8 @@ public final class DtoFactory {
     /**
      * Creates a BulletinDTO for the specified Bulletin.
      *
-     * @param bulletin
-     * @return
+     * @param bulletin bulletin
+     * @return dto
      */
     public BulletinDTO createBulletinDto(final Bulletin bulletin) {
         final BulletinDTO dto = new BulletinDTO();
@@ -1514,8 +1504,8 @@ public final class DtoFactory {
     /**
      * Creates a ProvenanceEventNodeDTO for the specified ProvenanceEventLineageNode.
      *
-     * @param node
-     * @return
+     * @param node node
+     * @return dto
      */
     public ProvenanceNodeDTO createProvenanceEventNodeDTO(final ProvenanceEventLineageNode node) {
         final ProvenanceNodeDTO dto = new ProvenanceNodeDTO();
@@ -1534,8 +1524,8 @@ public final class DtoFactory {
     /**
      * Creates a FlowFileNodeDTO for the specified LineageNode.
      *
-     * @param node
-     * @return
+     * @param node node
+     * @return dto
      */
     public ProvenanceNodeDTO createFlowFileNodeDTO(final LineageNode node) {
         final ProvenanceNodeDTO dto = new ProvenanceNodeDTO();
@@ -1551,8 +1541,8 @@ public final class DtoFactory {
     /**
      * Creates a ProvenanceLinkDTO for the specified LineageEdge.
      *
-     * @param edge
-     * @return
+     * @param edge edge
+     * @return dto
      */
     public ProvenanceLinkDTO createProvenanceLinkDTO(final LineageEdge edge) {
         final LineageNode source = edge.getSource();
@@ -1570,8 +1560,8 @@ public final class DtoFactory {
     /**
      * Creates a LineageDTO for the specified Lineage.
      *
-     * @param computeLineageSubmission
-     * @return
+     * @param computeLineageSubmission submission
+     * @return dto
      */
     public LineageDTO createLineageDto(final ComputeLineageSubmission computeLineageSubmission) {
         // build the lineage dto
@@ -1651,8 +1641,8 @@ public final class DtoFactory {
     /**
      * Creates a SystemDiagnosticsDTO for the specified SystemDiagnostics.
      *
-     * @param sysDiagnostics
-     * @return
+     * @param sysDiagnostics diags
+     * @return dto
      */
     public SystemDiagnosticsDTO createSystemDiagnosticsDto(final SystemDiagnostics sysDiagnostics) {
 
@@ -1705,9 +1695,9 @@ public final class DtoFactory {
     /**
      * Creates a StorageUsageDTO from the specified StorageUsage.
      *
-     * @param identifier
-     * @param storageUsage
-     * @return
+     * @param identifier id
+     * @param storageUsage usage
+     * @return dto
      */
     public SystemDiagnosticsDTO.StorageUsageDTO createStorageUsageDTO(final String identifier, final StorageUsage storageUsage) {
         final SystemDiagnosticsDTO.StorageUsageDTO dto = new SystemDiagnosticsDTO.StorageUsageDTO();
@@ -1725,9 +1715,9 @@ public final class DtoFactory {
     /**
      * Creates a GarbageCollectionDTO from the specified GarbageCollection.
      *
-     * @param name
-     * @param garbageCollection
-     * @return
+     * @param name name
+     * @param garbageCollection gc
+     * @return dto
      */
     public SystemDiagnosticsDTO.GarbageCollectionDTO createGarbageCollectionDTO(final String name, final GarbageCollection garbageCollection) {
         final SystemDiagnosticsDTO.GarbageCollectionDTO dto = new SystemDiagnosticsDTO.GarbageCollectionDTO();
@@ -1740,8 +1730,8 @@ public final class DtoFactory {
     /**
      * Creates a ProcessorConfigDTO from the specified ProcessorNode.
      *
-     * @param procNode
-     * @return
+     * @param procNode node
+     * @return dto
      */
     public ProcessorConfigDTO createProcessorConfigDto(final ProcessorNode procNode) {
         if (procNode == null) {
@@ -1818,8 +1808,8 @@ public final class DtoFactory {
     /**
      * Creates a PropertyDesriptorDTO from the specified PropertyDesriptor.
      *
-     * @param propertyDescriptor
-     * @return
+     * @param propertyDescriptor descriptor
+     * @return dto
      */
     public PropertyDescriptorDTO createPropertyDescriptorDto(final PropertyDescriptor propertyDescriptor) {
         if (propertyDescriptor == null) {
@@ -1873,9 +1863,7 @@ public final class DtoFactory {
         return dto;
     }
 
-    // 
     // Copy methods
-    //
     public LabelDTO copy(final LabelDTO original) {
         final LabelDTO copy = new LabelDTO();
         copy.setId(original.getId());
@@ -2164,11 +2152,11 @@ public final class DtoFactory {
 
     /**
      *
-     * @param original
+     * @param original orig
      * @param deep if <code>true</code>, all Connections, ProcessGroups, Ports, Processors, etc. will be copied. If <code>false</code>, the copy will have links to the same objects referenced by
      * <code>original</code>.
      *
-     * @return
+     * @return dto
      */
     private FlowSnippetDTO copy(final FlowSnippetDTO original, final boolean deep) {
         final FlowSnippetDTO copy = new FlowSnippetDTO();
@@ -2294,8 +2282,8 @@ public final class DtoFactory {
     /**
      * Factory method for creating a new RevisionDTO based on this controller.
      *
-     * @param lastMod
-     * @return
+     * @param lastMod mod
+     * @return dto
      */
     public RevisionDTO createRevisionDTO(FlowModification lastMod) {
         final Revision revision = lastMod.getRevision();
@@ -2312,8 +2300,8 @@ public final class DtoFactory {
     /**
      * Factory method for creating a new user transfer object.
      *
-     * @param user
-     * @return
+     * @param user user
+     * @return dto
      */
     public UserDTO createUserDTO(NiFiUser user) {
         // convert the users authorities

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/package-info.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/package-info.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/package-info.java
index b75d13d..0392ca4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/package-info.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/package-info.java
@@ -16,41 +16,26 @@
  */
 /**
  * <p>
- * The NiFi REST API allows clients to obtain and update configuration and
- * status information pertaining to an instance of NiFi. The links below detail
- * each resource. Follow the link to get more information about the resource
- * including the supported HTTP methods and the expected parameters.</p>
+ * The NiFi REST API allows clients to obtain and update configuration and status information pertaining to an instance of NiFi. The links below detail each resource. Follow the link to get more
+ * information about the resource including the supported HTTP methods and the expected parameters.</p>
  *
  * <p>
- * Additionally, the documentation for each resource will describe what type of
- * data should be returned from a successful invocation. However, if the request
- * is not successful one of the follow status codes should be returned:</p>
+ * Additionally, the documentation for each resource will describe what type of data should be returned from a successful invocation. However, if the request is not successful, one of the following status
+ * codes should be returned:</p>
  *
  * <ul>
- * <li>400 (Bad Request) - A 400 status code will be returned when NiFi is
- * unable to complete the request because it was invalid. The request should not
- * be retried without modification.</li>
- * <li>401 (Unathorized) - A 401 status code indicates that the user is not
- * known to this NiFi instance. The user may submit an account request.</li>
- * <li>403 (Forbidden) - A 403 status code indicates that the user is known to
- * this NiFi instance and they do not have authority to perform the requested
- * action.</li>
- * <li>404 (Not Found) - A 404 status code will be returned when the desired
- * resource does not exist.</li>
- * <li>409 (Conflict) - NiFi employs an optimistic locking strategy where the
- * client must include a revision in their request when performing an update. If
- * the specified revision does not match the current base revision a 409 status
- * code is returned. Additionally, a 409 is used when the state of the system
- * does not allow for the request at that time. This same request may be
- * successful later if the system is in a different state (e.g. cannot delete a
- * processor because it is currently running).</li>
- * <li>500 (Internal Server Error) - A 500 status code indicates that an
- * unexpected error has occurred.</li>
+ * <li>400 (Bad Request) - A 400 status code will be returned when NiFi is unable to complete the request because it was invalid. The request should not be retried without modification.</li>
+ * <li>401 (Unauthorized) - A 401 status code indicates that the user is not known to this NiFi instance. The user may submit an account request.</li>
+ * <li>403 (Forbidden) - A 403 status code indicates that the user is known to this NiFi instance and they do not have authority to perform the requested action.</li>
+ * <li>404 (Not Found) - A 404 status code will be returned when the desired resource does not exist.</li>
+ * <li>409 (Conflict) - NiFi employs an optimistic locking strategy where the client must include a revision in their request when performing an update. If the specified revision does not match the
+ * current base revision a 409 status code is returned. Additionally, a 409 is used when the state of the system does not allow for the request at that time. This same request may be successful later
+ * if the system is in a different state (e.g. cannot delete a processor because it is currently running).</li>
+ * <li>500 (Internal Server Error) - A 500 status code indicates that an unexpected error has occurred.</li>
  * </ul>
  *
  * <p>
- * Most unsuccessful requests will include a description of the problem in the
- * entity body of the response.</p>
+ * Most unsuccessful requests will include a description of the problem in the entity body of the response.</p>
  *
  * <p>
  * The context path for the REST API is /nifi-api</p>
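
The 409 (Conflict) item above describes revision-based optimistic locking: an update must carry the client's revision, and a stale revision is rejected so the client can refresh and retry. The self-contained sketch below only illustrates that contract; the class name, field names, and numbers are invented and are not NiFi's implementation.

    public class RevisionConflictDemo {

        private long baseRevision = 7;   // current server-side revision (invented value)

        /** Returns an HTTP-style status code: 200 when the change is applied, 409 on a stale revision. */
        int update(final long clientRevision) {
            if (clientRevision != baseRevision) {
                return 409;              // revision mismatch: Conflict, caller must refresh and retry
            }
            baseRevision++;              // change accepted, advance the base revision
            return 200;
        }

        public static void main(String[] args) {
            final RevisionConflictDemo resource = new RevisionConflictDemo();
            System.out.println(resource.update(7));   // 200, revisions match
            System.out.println(resource.update(7));   // 409, base revision has moved on to 8
        }
    }
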

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/BulletinBoardPatternParameter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/BulletinBoardPatternParameter.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/BulletinBoardPatternParameter.java
index 98bfbeb..c34ec59 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/BulletinBoardPatternParameter.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/BulletinBoardPatternParameter.java
@@ -20,8 +20,7 @@ import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
 /**
- * Parameter class that auto [ap|pre]pends '.*' to the specified pattern to make
- * user input more user friendly.
+ * Parameter class that auto [ap|pre]pends '.*' to the specified pattern to make user input more user friendly.
  */
 public class BulletinBoardPatternParameter {
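
Read concretely, the sentence above means a user's partial pattern is wrapped in '.*' so it matches anywhere in the bulletin text rather than having to match the whole string. The snippet below only illustrates that idea; it is not the class's actual implementation and the input value is hypothetical.

    import java.util.regex.Pattern;

    public class PatternWrapDemo {
        public static void main(String[] args) {
            final String rawPattern = "disk full";                                       // hypothetical user input
            final Pattern wrapped = Pattern.compile(".*" + rawPattern + ".*");           // '.*' prepended and appended
            System.out.println(wrapped.matcher("WARN: disk full on node-1").matches());  // true
        }
    }
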
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/ClientIdParameter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/ClientIdParameter.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/ClientIdParameter.java
index c0a39c2..85a0a29 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/ClientIdParameter.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/ClientIdParameter.java
@@ -20,8 +20,7 @@ import java.util.UUID;
 import org.apache.commons.lang3.StringUtils;
 
 /**
- * Class for parsing handling client ids. If the client id is not specified, one
- * will be generated.
+ * Class for parsing and handling client ids. If the client id is not specified, one will be generated.
  */
 public class ClientIdParameter {
 

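The generation fallback mentioned in that javadoc amounts to a one-line check. The helper below is a hypothetical sketch of the documented behaviour, assuming commons-lang3 is on the classpath; it is not the actual class.

    import java.util.UUID;
    import org.apache.commons.lang3.StringUtils;

    public class ClientIdSketch {
        // Use the supplied client id when present, otherwise generate one.
        static String resolveClientId(String rawClientId) {
            return StringUtils.isBlank(rawClientId) ? UUID.randomUUID().toString() : rawClientId.trim();
        }

        public static void main(String[] args) {
            System.out.println(resolveClientId("my-client")); // my-client
            System.out.println(resolveClientId(null));        // a freshly generated UUID
        }
    }
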
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DateTimeParameter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DateTimeParameter.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DateTimeParameter.java
index 726f031..3480cd0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DateTimeParameter.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DateTimeParameter.java
@@ -22,8 +22,7 @@ import java.util.Date;
 import java.util.Locale;
 
 /**
- * Class for parsing integer parameters and providing a user friendly error
- * message.
+ * Class for parsing date/time parameters and providing a user friendly error message.
  */
 public class DateTimeParameter {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DoubleParameter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DoubleParameter.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DoubleParameter.java
index eba046b..8ea7e38 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DoubleParameter.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/DoubleParameter.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.web.api.request;
 
 /**
- * Class for parsing double parameters and providing a user friendly error
- * message.
+ * Class for parsing double parameters and providing a user friendly error message.
  */
 public class DoubleParameter {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/IntegerParameter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/IntegerParameter.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/IntegerParameter.java
index cfbdbd8..76e8087 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/IntegerParameter.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/IntegerParameter.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.web.api.request;
 
 /**
- * Class for parsing integer parameters and providing a user friendly error
- * message.
+ * Class for parsing integer parameters and providing a user friendly error message.
  */
 public class IntegerParameter {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/LongParameter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/LongParameter.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/LongParameter.java
index eef28fe..ba2def9 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/LongParameter.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/request/LongParameter.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.web.api.request;
 
 /**
- * Class for parsing long parameters and providing a user friendly error
- * message.
+ * Class for parsing long parameters and providing a user friendly error message.
  */
 public class LongParameter {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/contextlistener/ApplicationStartupContextListener.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/contextlistener/ApplicationStartupContextListener.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/contextlistener/ApplicationStartupContextListener.java
index de54dd2..a853aca 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/contextlistener/ApplicationStartupContextListener.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/contextlistener/ApplicationStartupContextListener.java
@@ -32,12 +32,8 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.web.context.support.WebApplicationContextUtils;
 
 /**
- * Application context listener for starting the application. If the application
- * is configured for a standalone environment or the application is a node in a
- * clustered environment then a flow controller is created and managed.
- * Otherwise, we assume the application is running as the cluster manager in a
- * clustered environment. In this case, the cluster manager is created and
- * managed.
+ * Application context listener for starting the application. If the application is configured for a standalone environment or the application is a node in a clustered environment then a flow
+ * controller is created and managed. Otherwise, we assume the application is running as the cluster manager in a clustered environment. In this case, the cluster manager is created and managed.
  *
  */
 public class ApplicationStartupContextListener implements ServletContextListener {

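The startup decision that javadoc describes, roughly: standalone instances and cluster nodes get a flow controller, while the cluster manager gets its own component. The listener below is a placeholder sketch; the system property name and println calls stand in for NiFi's real configuration keys and startup logic.

    import javax.servlet.ServletContextEvent;
    import javax.servlet.ServletContextListener;

    public class StartupDecisionSketch implements ServletContextListener {

        @Override
        public void contextInitialized(ServletContextEvent sce) {
            boolean isClusterManager = Boolean.parseBoolean(System.getProperty("example.is.cluster.manager", "false"));

            if (isClusterManager) {
                // running as the cluster manager in a clustered environment
                System.out.println("Creating and managing the cluster manager");
            } else {
                // standalone instance or a node in a cluster
                System.out.println("Creating and managing the flow controller");
            }
        }

        @Override
        public void contextDestroyed(ServletContextEvent sce) {
            // shut down whichever component was started above
        }
    }
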
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/controller/ControllerFacade.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/controller/ControllerFacade.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/controller/ControllerFacade.java
index 0f384e3..b614f0a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/controller/ControllerFacade.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/controller/ControllerFacade.java
@@ -118,9 +118,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.access.AccessDeniedException;
 
-/**
- *
- */
 public class ControllerFacade {
 
     private static final Logger logger = LoggerFactory.getLogger(ControllerFacade.class);
@@ -144,8 +141,8 @@ public class ControllerFacade {
     /**
      * Returns the group id that contains the specified processor.
      *
-     * @param processorId
-     * @return
+     * @param processorId processor id
+     * @return group id
      */
     public String findProcessGroupIdForProcessor(String processorId) {
         final ProcessGroup rootGroup = flowController.getGroup(flowController.getRootGroupId());
@@ -160,7 +157,7 @@ public class ControllerFacade {
     /**
      * Sets the name of this controller.
      *
-     * @param name
+     * @param name name
      */
     public void setName(String name) {
         flowController.setName(name);
@@ -169,7 +166,7 @@ public class ControllerFacade {
     /**
      * Sets the comments of this controller.
      *
-     * @param comments
+     * @param comments comments
      */
     public void setComments(String comments) {
         flowController.setComments(comments);
@@ -178,7 +175,7 @@ public class ControllerFacade {
     /**
      * Sets the max timer driven thread count of this controller.
      *
-     * @param maxTimerDrivenThreadCount
+     * @param maxTimerDrivenThreadCount count
      */
     public void setMaxTimerDrivenThreadCount(int maxTimerDrivenThreadCount) {
         flowController.setMaxTimerDrivenThreadCount(maxTimerDrivenThreadCount);
@@ -187,7 +184,7 @@ public class ControllerFacade {
     /**
      * Sets the max event driven thread count of this controller.
      *
-     * @param maxEventDrivenThreadCount
+     * @param maxEventDrivenThreadCount count
      */
     public void setMaxEventDrivenThreadCount(int maxEventDrivenThreadCount) {
         flowController.setMaxEventDrivenThreadCount(maxEventDrivenThreadCount);
@@ -196,7 +193,7 @@ public class ControllerFacade {
     /**
      * Gets the root group id.
      *
-     * @return
+     * @return group id
      */
     public String getRootGroupId() {
         return flowController.getRootGroupId();
@@ -205,7 +202,7 @@ public class ControllerFacade {
     /**
      * Gets the input ports on the root group.
      *
-     * @return
+     * @return input ports
      */
     public Set<RootGroupPort> getInputPorts() {
         final Set<RootGroupPort> inputPorts = new HashSet<>();
@@ -221,7 +218,7 @@ public class ControllerFacade {
     /**
      * Gets the output ports on the root group.
      *
-     * @return
+     * @return output ports
      */
     public Set<RootGroupPort> getOutputPorts() {
         final Set<RootGroupPort> outputPorts = new HashSet<>();
@@ -237,9 +234,9 @@ public class ControllerFacade {
     /**
      * Returns the status history for the specified processor.
      *
-     * @param groupId
-     * @param processorId
-     * @return
+     * @param groupId group id
+     * @param processorId processor id
+     * @return status history
      */
     public StatusHistoryDTO getProcessorStatusHistory(final String groupId, final String processorId) {
         return flowController.getProcessorStatusHistory(processorId);
@@ -248,9 +245,9 @@ public class ControllerFacade {
     /**
      * Returns the status history for the specified connection.
      *
-     * @param groupId
-     * @param connectionId
-     * @return
+     * @param groupId group id
+     * @param connectionId connection id
+     * @return status history
      */
     public StatusHistoryDTO getConnectionStatusHistory(final String groupId, final String connectionId) {
         return flowController.getConnectionStatusHistory(connectionId);
@@ -259,8 +256,8 @@ public class ControllerFacade {
     /**
      * Returns the status history for the specified process group.
      *
-     * @param groupId
-     * @return
+     * @param groupId group id
+     * @return status history
      */
     public StatusHistoryDTO getProcessGroupStatusHistory(final String groupId) {
         return flowController.getProcessGroupStatusHistory(groupId);
@@ -269,9 +266,9 @@ public class ControllerFacade {
     /**
      * Returns the status history for the specified remote process group.
      *
-     * @param groupId
-     * @param remoteProcessGroupId
-     * @return
+     * @param groupId group id
+     * @param remoteProcessGroupId remote process group id
+     * @return status history
      */
     public StatusHistoryDTO getRemoteProcessGroupStatusHistory(final String groupId, final String remoteProcessGroupId) {
         return flowController.getRemoteProcessGroupStatusHistory(remoteProcessGroupId);
@@ -280,12 +277,15 @@ public class ControllerFacade {
     /**
      * Get the node id of this controller.
      *
-     * @return
+     * @return node identifier
      */
     public NodeIdentifier getNodeId() {
         return flowController.getNodeId();
     }
 
+    /**
+     * @return true if is clustered
+     */
     public boolean isClustered() {
         return flowController.isClustered();
     }
@@ -293,7 +293,7 @@ public class ControllerFacade {
     /**
      * Gets the name of this controller.
      *
-     * @return
+     * @return name
      */
     public String getName() {
         return flowController.getName();
@@ -306,7 +306,7 @@ public class ControllerFacade {
     /**
      * Gets the comments of this controller.
      *
-     * @return
+     * @return comments
      */
     public String getComments() {
         return flowController.getComments();
@@ -315,7 +315,7 @@ public class ControllerFacade {
     /**
      * Gets the max timer driven thread count of this controller.
      *
-     * @return
+     * @return count
      */
     public int getMaxTimerDrivenThreadCount() {
         return flowController.getMaxTimerDrivenThreadCount();
@@ -324,7 +324,7 @@ public class ControllerFacade {
     /**
      * Gets the max event driven thread count of this controller.
      *
-     * @return
+     * @return count
      */
     public int getMaxEventDrivenThreadCount() {
         return flowController.getMaxEventDrivenThreadCount();
@@ -333,7 +333,7 @@ public class ControllerFacade {
     /**
      * Gets the FlowFileProcessor types that this controller supports.
      *
-     * @return
+     * @return types
      */
     public Set<DocumentedTypeDTO> getFlowFileProcessorTypes() {
         return dtoFactory.fromDocumentedTypes(ExtensionManager.getExtensions(Processor.class));
@@ -342,7 +342,7 @@ public class ControllerFacade {
     /**
      * Gets the FlowFileComparator types that this controller supports.
      *
-     * @return
+     * @return the FlowFileComparator types that this controller supports
      */
     public Set<DocumentedTypeDTO> getFlowFileComparatorTypes() {
         return dtoFactory.fromDocumentedTypes(ExtensionManager.getExtensions(FlowFilePrioritizer.class));
@@ -351,9 +351,9 @@ public class ControllerFacade {
     /**
      * Returns whether the specified type implements the specified serviceType.
      *
-     * @param baseType
-     * @param type
-     * @return
+     * @param serviceType type
+     * @param type type
+     * @return whether the specified type implements the specified serviceType
      */
     private boolean implementsServiceType(final String serviceType, final Class type) {
         final List<Class<?>> interfaces = ClassUtils.getAllInterfaces(type);
@@ -369,8 +369,8 @@ public class ControllerFacade {
     /**
      * Gets the ControllerService types that this controller supports.
      *
-     * @param serviceType
-     * @return
+     * @param serviceType type
+     * @return the ControllerService types that this controller supports
      */
     public Set<DocumentedTypeDTO> getControllerServiceTypes(final String serviceType) {
         final Set<Class> serviceImplementations = ExtensionManager.getExtensions(ControllerService.class);
@@ -396,7 +396,7 @@ public class ControllerFacade {
     /**
      * Gets the ReportingTask types that this controller supports.
      *
-     * @return
+     * @return the ReportingTask types that this controller supports
      */
     public Set<DocumentedTypeDTO> getReportingTaskTypes() {
         return dtoFactory.fromDocumentedTypes(ExtensionManager.getExtensions(ReportingTask.class));
@@ -405,7 +405,7 @@ public class ControllerFacade {
     /**
      * Gets the counters for this controller.
      *
-     * @return
+     * @return the counters for this controller
      */
     public List<Counter> getCounters() {
         return flowController.getCounters();
@@ -414,8 +414,8 @@ public class ControllerFacade {
     /**
      * Resets the counter with the specified id.
      *
-     * @param id
-     * @return
+     * @param id id
+     * @return the counter with the specified id
      */
     public Counter resetCounter(final String id) {
         final Counter counter = flowController.resetCounter(id);
@@ -430,7 +430,7 @@ public class ControllerFacade {
     /**
      * Gets the status of this controller.
      *
-     * @return
+     * @return the status of this controller
      */
     public ControllerStatusDTO getControllerStatus() {
         final ProcessGroup rootGroup = flowController.getGroup(flowController.getRootGroupId());
@@ -462,8 +462,8 @@ public class ControllerFacade {
     /**
      * Gets the status for the specified process group.
      *
-     * @param groupId
-     * @return
+     * @param groupId group id
+     * @return the status for the specified process group
      */
     public ProcessGroupStatusDTO getProcessGroupStatus(final String groupId) {
         final ProcessGroupStatus processGroupStatus = flowController.getGroupStatus(groupId);
@@ -476,7 +476,7 @@ public class ControllerFacade {
     /**
      * Gets the BulletinRepository.
      *
-     * @return
+     * @return the BulletinRepository
      */
     public BulletinRepository getBulletinRepository() {
         return flowController.getBulletinRepository();
@@ -485,7 +485,7 @@ public class ControllerFacade {
     /**
      * Saves the state of the flow controller.
      *
-     * @throws NiFiCoreException
+     * @throws NiFiCoreException ex
      */
     public void save() throws NiFiCoreException {
         // save the flow controller
@@ -494,40 +494,36 @@ public class ControllerFacade {
     }
 
     /**
-     * Returns the socket port that the Cluster Manager is listening on for
-     * Site-to-Site communications
+     * Returns the socket port that the Cluster Manager is listening on for Site-to-Site communications
      *
-     * @return
+     * @return the socket port that the Cluster Manager is listening on for Site-to-Site communications
      */
     public Integer getClusterManagerRemoteSiteListeningPort() {
         return flowController.getClusterManagerRemoteSiteListeningPort();
     }
 
     /**
-     * Indicates whether or not Site-to-Site communications with the Cluster
-     * Manager are secure
+     * Indicates whether or not Site-to-Site communications with the Cluster Manager are secure
      *
-     * @return
+     * @return whether or not Site-to-Site communications with the Cluster Manager are secure
      */
     public Boolean isClusterManagerRemoteSiteCommsSecure() {
         return flowController.isClusterManagerRemoteSiteCommsSecure();
     }
 
     /**
-     * Returns the socket port that the local instance is listening on for
-     * Site-to-Site communications
+     * Returns the socket port that the local instance is listening on for Site-to-Site communications
      *
-     * @return
+     * @return the socket port that the local instance is listening on for Site-to-Site communications
      */
     public Integer getRemoteSiteListeningPort() {
         return flowController.getRemoteSiteListeningPort();
     }
 
     /**
-     * Indicates whether or not Site-to-Site communications with the local
-     * instance are secure
+     * Indicates whether or not Site-to-Site communications with the local instance are secure
      *
-     * @return
+     * @return whether or not Site-to-Site communications with the local instance are secure
      */
     public Boolean isRemoteSiteCommsSecure() {
         return flowController.isRemoteSiteCommsSecure();
@@ -536,7 +532,7 @@ public class ControllerFacade {
     /**
      * Returns a SystemDiagnostics that describes the current state of the node
      *
-     * @return
+     * @return a SystemDiagnostics that describes the current state of the node
      */
     public SystemDiagnostics getSystemDiagnostics() {
         return flowController.getSystemDiagnostics();
@@ -545,7 +541,7 @@ public class ControllerFacade {
     /**
      * Gets the available options for searching provenance.
      *
-     * @return
+     * @return the available options for searching provenance
      */
     public ProvenanceOptionsDTO getProvenanceSearchOptions() {
         final ProvenanceEventRepository provenanceRepository = flowController.getProvenanceRepository();
@@ -578,8 +574,8 @@ public class ControllerFacade {
     /**
      * Submits a provenance query.
      *
-     * @param provenanceDto
-     * @return
+     * @param provenanceDto dto
+     * @return provenance info
      */
     public ProvenanceDTO submitProvenance(ProvenanceDTO provenanceDto) {
         final ProvenanceRequestDTO requestDto = provenanceDto.getRequest();
@@ -632,8 +628,8 @@ public class ControllerFacade {
     /**
      * Retrieves the results of a provenance query.
      *
-     * @param provenanceId
-     * @return
+     * @param provenanceId id
+     * @return the results of a provenance query
      */
     public ProvenanceDTO getProvenanceQuery(String provenanceId) {
         try {
@@ -717,8 +713,8 @@ public class ControllerFacade {
     /**
      * Submits the specified lineage request.
      *
-     * @param lineageDto
-     * @return
+     * @param lineageDto dto
+     * @return updated lineage
      */
     public LineageDTO submitLineage(LineageDTO lineageDto) {
         final LineageRequestDTO requestDto = lineageDto.getRequest();
@@ -746,8 +742,8 @@ public class ControllerFacade {
     /**
      * Gets the lineage with the specified id.
      *
-     * @param lineageId
-     * @return
+     * @param lineageId id
+     * @return the lineage with the specified id
      */
     public LineageDTO getLineage(final String lineageId) {
         // get the query to the provenance repository
@@ -765,7 +761,7 @@ public class ControllerFacade {
     /**
      * Deletes the query with the specified id.
      *
-     * @param provenanceId
+     * @param provenanceId id
      */
     public void deleteProvenanceQuery(final String provenanceId) {
         // get the query to the provenance repository
@@ -779,7 +775,7 @@ public class ControllerFacade {
     /**
      * Deletes the lineage with the specified id.
      *
-     * @param lineageId
+     * @param lineageId id
      */
     public void deleteLineage(final String lineageId) {
         // get the query to the provenance repository
@@ -793,10 +789,10 @@ public class ControllerFacade {
     /**
      * Gets the content for the specified claim.
      *
-     * @param eventId
-     * @param uri
-     * @param contentDirection
-     * @return
+     * @param eventId event id
+     * @param uri uri
+     * @param contentDirection direction
+     * @return the content for the specified claim
      */
     public DownloadableContent getContent(final Long eventId, final String uri, final ContentDirection contentDirection) {
         try {
@@ -856,8 +852,8 @@ public class ControllerFacade {
     /**
      * Submits a replay request for the specified event id.
      *
-     * @param eventId
-     * @return
+     * @param eventId event id
+     * @return provenance event
      */
     public ProvenanceEventDTO submitReplay(final Long eventId) {
         try {
@@ -885,8 +881,8 @@ public class ControllerFacade {
     /**
      * Get the provenance event with the specified event id.
      *
-     * @param eventId
-     * @return
+     * @param eventId event id
+     * @return the provenance event with the specified event id
      */
     public ProvenanceEventDTO getProvenanceEvent(final Long eventId) {
         try {
@@ -905,8 +901,8 @@ public class ControllerFacade {
     /**
      * Creates a ProvenanceEventDTO for the specified ProvenanceEventRecord.
      *
-     * @param event
-     * @return
+     * @param event event
+     * @return event
      */
     private ProvenanceEventDTO createProvenanceEventDto(final ProvenanceEventRecord event) {
         // convert the attributes
@@ -1018,12 +1014,6 @@ public class ControllerFacade {
         return dto;
     }
 
-    /**
-     * Gets the name for the component with the specified id.
-     *
-     * @param dto
-     * @return
-     */
     private void setComponentDetails(final ProvenanceEventDTO dto) {
         final ProcessGroup root = flowController.getGroup(flowController.getRootGroupId());
 
@@ -1037,8 +1027,8 @@ public class ControllerFacade {
     /**
      * Searches this controller for the specified term.
      *
-     * @param search
-     * @return
+     * @param search search
+     * @return result
      */
     public SearchResultsDTO search(final String search) {
         final ProcessGroup rootGroup = flowController.getGroup(flowController.getRootGroupId());

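One helper touched in the ControllerFacade diff above, implementsServiceType, decides whether a candidate class provides a given service type by walking its interfaces. The standalone sketch below shows that idea with ClassUtils.getAllInterfaces; matching on the fully qualified name is an assumption here, not necessarily the exact rule the real method applies.

    import java.util.List;
    import org.apache.commons.lang3.ClassUtils;

    public class ServiceTypeCheckSketch {
        static boolean implementsServiceType(String serviceType, Class<?> type) {
            // Collect every interface implemented anywhere in the type hierarchy and compare by name.
            final List<Class<?>> interfaces = ClassUtils.getAllInterfaces(type);
            for (final Class<?> i : interfaces) {
                if (i.getName().equals(serviceType)) {
                    return true;
                }
            }
            return false;
        }

        public static void main(String[] args) {
            System.out.println(implementsServiceType("java.lang.Comparable", String.class)); // true
            System.out.println(implementsServiceType("java.io.Closeable", String.class));    // false
        }
    }
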
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ControllerServiceDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ControllerServiceDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ControllerServiceDAO.java
index 52cba66..bb18b1b 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ControllerServiceDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ControllerServiceDAO.java
@@ -81,8 +81,7 @@ public interface ControllerServiceDAO {
     void verifyUpdate(ControllerServiceDTO controllerServiceDTO);
 
     /**
-     * Determines whether the referencing component of the specified controller
-     * service can be updated.
+     * Determines whether the referencing component of the specified controller service can be updated.
      *
      * @param controllerServiceId service id
      * @param scheduledState scheduled state

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/PortDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/PortDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/PortDAO.java
index 33ae2bd..1df13e5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/PortDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/PortDAO.java
@@ -25,7 +25,7 @@ public interface PortDAO {
 
     /**
      * @param groupId group id
-     * @param portId  port id
+     * @param portId port id
      * @return Determines if the specified port exists in the specified group
      */
     boolean hasPort(String groupId, String portId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessGroupDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessGroupDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessGroupDAO.java
index 2cd8506..3655083 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessGroupDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessGroupDAO.java
@@ -20,16 +20,13 @@ import java.util.Set;
 import org.apache.nifi.groups.ProcessGroup;
 import org.apache.nifi.web.api.dto.ProcessGroupDTO;
 
-/**
- *
- */
 public interface ProcessGroupDAO {
 
     /**
      * Determines if the specified remote process group exists.
      *
-     * @param groupId
-     * @return
+     * @param groupId id
+     * @return true if group exists
      */
     boolean hasProcessGroup(String groupId);
 
@@ -61,7 +58,7 @@ public interface ProcessGroupDAO {
     /**
      * Verifies the specified process group can be modified.
      *
-     * @param processGroupDTO
+     * @param processGroupDTO dto
      */
     void verifyUpdate(ProcessGroupDTO processGroupDTO);
 
@@ -76,7 +73,7 @@ public interface ProcessGroupDAO {
     /**
      * Verifies the specified process group can be removed.
      *
-     * @param groupId
+     * @param groupId id
      */
     void verifyDelete(String groupId);
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessorDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessorDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessorDAO.java
index 8f2416a..30cce5d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessorDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ProcessorDAO.java
@@ -64,8 +64,7 @@ public interface ProcessorDAO {
     void verifyUpdate(String groupId, ProcessorDTO processorDTO);
 
     /**
-     * Updates the configuration for the processor using the specified
-     * processorDTO.
+     * Updates the configuration for the processor using the specified processorDTO.
      *
      * @param groupId group id
      * @param processorDTO processor

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/RemoteProcessGroupDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/RemoteProcessGroupDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/RemoteProcessGroupDAO.java
index cf1ac30..d9eafb0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/RemoteProcessGroupDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/RemoteProcessGroupDAO.java
@@ -100,7 +100,7 @@ public interface RemoteProcessGroupDAO {
      * @param groupId id
      * @param remoteProcessGroupId id
      * @param remoteProcessGroupPort port
-     * @return
+     * @return updated group port
      */
     RemoteGroupPort updateRemoteProcessGroupInputPort(String groupId, String remoteProcessGroupId, RemoteProcessGroupPortDTO remoteProcessGroupPort);
 
@@ -110,7 +110,7 @@ public interface RemoteProcessGroupDAO {
      * @param groupId group id
      * @param remoteProcessGroupId group id
      * @param remoteProcessGroupPort port
-     * @return
+     * @return group port
      */
     RemoteGroupPort updateRemoteProcessGroupOutputPort(String groupId, String remoteProcessGroupId, RemoteProcessGroupPortDTO remoteProcessGroupPort);
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ReportingTaskDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ReportingTaskDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ReportingTaskDAO.java
index cbdd4a1..43852c2 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ReportingTaskDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/ReportingTaskDAO.java
@@ -21,16 +21,13 @@ import org.apache.nifi.controller.ReportingTaskNode;
 
 import org.apache.nifi.web.api.dto.ReportingTaskDTO;
 
-/**
- *
- */
 public interface ReportingTaskDAO {
 
     /**
      * Determines if the specified reporting task exists.
      *
-     * @param reportingTaskId
-     * @return
+     * @param reportingTaskId id
+     * @return true if reporting task exists
      */
     boolean hasReportingTask(String reportingTaskId);
 
@@ -68,14 +65,14 @@ public interface ReportingTaskDAO {
     /**
      * Determines whether this reporting task can be updated.
      *
-     * @param reportingTaskDTO
+     * @param reportingTaskDTO dto
      */
     void verifyUpdate(ReportingTaskDTO reportingTaskDTO);
 
     /**
      * Determines whether this reporting task can be removed.
      *
-     * @param reportingTaskId
+     * @param reportingTaskId id
      */
     void verifyDelete(String reportingTaskId);
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/SnippetDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/SnippetDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/SnippetDAO.java
index 9ea60cb..a3cbed4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/SnippetDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/SnippetDAO.java
@@ -23,8 +23,7 @@ import org.apache.nifi.web.api.dto.SnippetDTO;
 public interface SnippetDAO {
 
     /**
-     * Copies the specified snippet and added the copy to the flow in the
-     * specified group.
+     * Copies the specified snippet and adds the copy to the flow in the specified group.
      *
      * @param groupId group id
      * @param snippetId snippet id

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/TemplateDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/TemplateDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/TemplateDAO.java
index 5264119..014a607 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/TemplateDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/TemplateDAO.java
@@ -21,9 +21,6 @@ import org.apache.nifi.controller.Template;
 import org.apache.nifi.web.api.dto.FlowSnippetDTO;
 import org.apache.nifi.web.api.dto.TemplateDTO;
 
-/**
- *
- */
 public interface TemplateDAO {
 
     /**
@@ -37,19 +34,19 @@ public interface TemplateDAO {
     /**
      * Import the specified template.
      *
-     * @param templateDTO
-     * @return
+     * @param templateDTO dto
+     * @return template
      */
     Template importTemplate(TemplateDTO templateDTO);
 
     /**
      * Instantiate the corresponding template.
      *
-     * @param groupId
-     * @param originX
-     * @param originY
-     * @param templateId
-     * @return
+     * @param groupId group id
+     * @param originX x
+     * @param originY y
+     * @param templateId template id
+     * @return flow snippet
      */
     FlowSnippetDTO instantiateTemplate(String groupId, Double originX, Double originY, String templateId);
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/ComponentDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/ComponentDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/ComponentDAO.java
index a2c9567..165e7a5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/ComponentDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/ComponentDAO.java
@@ -20,17 +20,14 @@ import org.apache.nifi.controller.FlowController;
 import org.apache.nifi.groups.ProcessGroup;
 import org.apache.nifi.web.ResourceNotFoundException;
 
-/**
- *
- */
 public abstract class ComponentDAO {
 
     /**
      * Returns whether the specified object is not null.
      *
-     * @param <T>
-     * @param object
-     * @return
+     * @param <T> type
+     * @param object object
+     * @return true if the specified object is not null
      */
     protected <T> boolean isNotNull(T object) {
         return object != null;
@@ -39,9 +36,9 @@ public abstract class ComponentDAO {
     /**
      * Returns whether any of the specified objects are not null.
      *
-     * @param <T>
-     * @param objects
-     * @return
+     * @param <T> type
+     * @param objects objects
+     * @return true if any of the specified objects are not null
      */
     protected <T> boolean isAnyNotNull(T... objects) {
         for (final T object : objects) {
@@ -56,9 +53,9 @@ public abstract class ComponentDAO {
     /**
      * Locates the specified ProcessGroup.
      *
-     * @param flowController
-     * @param groupId
-     * @return
+     * @param flowController controller
+     * @param groupId id
+     * @return group
      */
     protected ProcessGroup locateProcessGroup(FlowController flowController, String groupId) {
         ProcessGroup group = flowController.getGroup(groupId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java
index a984867..5fbc393 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java
@@ -63,23 +63,11 @@ public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO
         return connection;
     }
 
-    /**
-     * Gets the specified connection.
-     *
-     * @return The connections
-     */
     @Override
     public Connection getConnection(final String groupId, final String id) {
         return locateConnection(groupId, id);
     }
 
-    /**
-     * Gets the connections for the specified source processor.
-     *
-     * @param groupId
-     * @param processorId
-     * @return
-     */
     @Override
     public Set<Connection> getConnectionsForSource(final String groupId, final String processorId) {
         final Set<Connection> connections = new HashSet<>(getConnections(groupId));
@@ -93,13 +81,6 @@ public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO
         return connections;
     }
 
-    /**
-     * Determines if the specified connection exists. Returns false when either
-     * the group or connection do not exist.
-     *
-     * @param id
-     * @return
-     */
     @Override
     public boolean hasConnection(final String groupId, final String id) {
         final ProcessGroup group = flowController.getGroup(groupId);
@@ -111,11 +92,6 @@ public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO
         return group.getConnection(id) != null;
     }
 
-    /**
-     * Gets all of the connections.
-     *
-     * @return The connections
-     */
     @Override
     public Set<Connection> getConnections(final String groupId) {
         final ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -124,9 +100,6 @@ public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO
 
     /**
      * Configures the specified connection using the specified dto.
-     *
-     * @param connection
-     * @param connectionDTO
      */
     private void configureConnection(Connection connection, ConnectionDTO connectionDTO) {
         // validate flow file comparators/prioritizers
@@ -181,10 +154,6 @@ public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO
 
     /**
      * Validates the proposed processor configuration.
-     *
-     * @param processorNode
-     * @param config
-     * @return
      */
     private List<String> validateProposedConfiguration(final String groupId, final ConnectionDTO connectionDTO) {
         List<String> validationErrors = new ArrayList<>();
@@ -233,12 +202,6 @@ public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO
         return validationErrors;
     }
 
-    /**
-     * Creates a new Connection.
-     *
-     * @param connectionDTO The connection DTO
-     * @return The connection
-     */
     @Override
     public Connection createConnection(final String groupId, final ConnectionDTO connectionDTO) {
         final ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -371,12 +334,6 @@ public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO
         }
     }
 
-    /**
-     * Updates the specified Connection.
-     *
-     * @param connectionDTO The connection DTO
-     * @return The connection
-     */
     @Override
     public Connection updateConnection(final String groupId, final ConnectionDTO connectionDTO) {
         final ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -500,11 +457,6 @@ public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO
         connection.verifyCanDelete();
     }
 
-    /**
-     * Deletes the specified Connection.
-     *
-     * @param id The id of the connection
-     */
     @Override
     public void deleteConnection(final String groupId, final String id) {
         final ProcessGroup group = locateProcessGroup(flowController, groupId);


[19/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/10860944
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/10860944
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/10860944

Branch: refs/heads/NIFI-292
Commit: 10860944d14cc4ddb0805b4085b401ae6dc195b2
Parents: 9a3b6be
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 13:50:09 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 13:50:09 2015 -0400

----------------------------------------------------------------------
 .../nifi-hadoop-bundle/nifi-hadoop-nar/pom.xml  | 28 ++++-----
 .../hadoop/AbstractHadoopProcessor.java         |  7 +--
 .../hadoop/CreateHadoopSequenceFile.java        | 28 +++------
 .../apache/nifi/processors/hadoop/GetHDFS.java  | 60 ++++++++++----------
 .../processors/hadoop/GetHDFSSequenceFile.java  | 18 ++----
 .../nifi/processors/hadoop/KeyValueReader.java  |  6 +-
 .../nifi/processors/hadoop/ValueReader.java     |  5 +-
 .../hadoop/util/ByteFilteringOutputStream.java  | 24 +++-----
 .../hadoop/util/InputStreamWritable.java        |  6 +-
 .../hadoop/util/OutputStreamWritable.java       |  3 +-
 .../hadoop/util/SequenceFileWriter.java         | 12 ++--
 11 files changed, 81 insertions(+), 116 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hadoop-nar/pom.xml
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hadoop-nar/pom.xml b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hadoop-nar/pom.xml
index 1eb29db..ca246b3 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hadoop-nar/pom.xml
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hadoop-nar/pom.xml
@@ -1,18 +1,18 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
+    <!--
+      Licensed to the Apache Software Foundation (ASF) under one or more
+      contributor license agreements.  See the NOTICE file distributed with
+      this work for additional information regarding copyright ownership.
+      The ASF licenses this file to You under the Apache License, Version 2.0
+      (the "License"); you may not use this file except in compliance with
+      the License.  You may obtain a copy of the License at
+          http://www.apache.org/licenses/LICENSE-2.0
+      Unless required by applicable law or agreed to in writing, software
+      distributed under the License is distributed on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+      See the License for the specific language governing permissions and
+      limitations under the License.
+    -->
     <modelVersion>4.0.0</modelVersion>
     <parent>
         <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
index 8d5749b..3294ead 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
@@ -45,16 +45,15 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.net.NetUtils;
 
 /**
- * This is a base class that is helpful when building processors interacting
- * with HDFS.
+ * This is a base class that is helpful when building processors interacting with HDFS.
  */
 public abstract class AbstractHadoopProcessor extends AbstractProcessor {
 
     // properties
     public static final PropertyDescriptor HADOOP_CONFIGURATION_RESOURCES = new PropertyDescriptor.Builder()
             .name("Hadoop Configuration Resources")
-            .description(
-                    "A file or comma separated list of files which contains the Hadoop file system configuration. Without this, Hadoop will search the classpath for a 'core-site.xml' and 'hdfs-site.xml' file or will revert to a default configuration.")
+            .description("A file or comma separated list of files which contains the Hadoop file system configuration. Without this, Hadoop "
+                    + "will search the classpath for a 'core-site.xml' and 'hdfs-site.xml' file or will revert to a default configuration.")
             .required(false)
             .addValidator(createMultipleFilesExistValidator())
             .build();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
index 98c1f1c..f462277 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
@@ -38,31 +38,21 @@ import org.apache.nifi.processors.hadoop.util.SequenceFileWriter;
 
 /**
  * <p>
- * This processor is used to create a Hadoop Sequence File, which essentially is
- * a file of key/value pairs. The key will be a file name and the value will be
- * the flow file content. The processor will take either a merged (a.k.a.
- * packaged) flow file or a singular flow file. Historically, this processor
- * handled the merging by type and size or time prior to creating a SequenceFile
- * output; it no longer does this. If creating a SequenceFile that contains
- * multiple files of the same type is desired, precede this processor with a
- * <code>RouteOnAttribute</code> processor to segregate files of the same type
- * and follow that with a <code>MergeContent</code> processor to bundle up
- * files. If the type of files is not important, just use the
- * <code>MergeContent</code> processor. When using the <code>MergeContent</code>
- * processor, the following Merge Formats are supported by this processor:
+ * This processor is used to create a Hadoop Sequence File, which essentially is a file of key/value pairs. The key will be a file name and the value will be the flow file content. The processor will
+ * take either a merged (a.k.a. packaged) flow file or a singular flow file. Historically, this processor handled the merging by type and size or time prior to creating a SequenceFile output; it no
+ * longer does this. If creating a SequenceFile that contains multiple files of the same type is desired, precede this processor with a <code>RouteOnAttribute</code> processor to segregate files of
+ * the same type and follow that with a <code>MergeContent</code> processor to bundle up files. If the type of files is not important, just use the <code>MergeContent</code> processor. When using the
+ * <code>MergeContent</code> processor, the following Merge Formats are supported by this processor:
  * <ul>
  * <li>TAR</li>
  * <li>ZIP</li>
  * <li>FlowFileStream v3</li>
  * </ul>
- * The created SequenceFile is named the same as the incoming FlowFile with the
- * suffix '.sf'. For incoming FlowFiles that are bundled, the keys in the
- * SequenceFile are the individual file names, the values are the contents of
- * each file.
+ * The created SequenceFile is named the same as the incoming FlowFile with the suffix '.sf'. For incoming FlowFiles that are bundled, the keys in the SequenceFile are the individual file names, the
+ * values are the contents of each file.
  * </p>
- * NOTE: The value portion of a key/value pair is loaded into memory. While
- * there is a max size limit of 2GB, this could cause memory issues if there are
- * too many concurrent tasks and the flow file sizes are large.
+ * NOTE: The value portion of a key/value pair is loaded into memory. While there is a max size limit of 2GB, this could cause memory issues if there are too many concurrent tasks and the flow file
+ * sizes are large.
  *
  */
 @SideEffectFree

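The key/value layout described in that javadoc (file name as key, file content as value, output named with a '.sf' suffix) maps directly onto the Hadoop SequenceFile API. The sketch below writes one entry that way; the paths and file name are placeholders, and this is not the processor's actual writer code.

    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.BytesWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class SequenceFileSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Output carries the '.sf' suffix, mirroring the naming convention described above.
            Path output = new Path("/tmp/example.txt.sf");

            try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                    SequenceFile.Writer.file(output),
                    SequenceFile.Writer.keyClass(Text.class),
                    SequenceFile.Writer.valueClass(BytesWritable.class))) {
                // Key is the incoming file name, value is its content held in memory (hence the 2GB note above).
                byte[] content = "hello from example.txt".getBytes(StandardCharsets.UTF_8);
                writer.append(new Text("example.txt"), new BytesWritable(content));
            }
        }
    }
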
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
index d763c29..361f1ed 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
@@ -65,8 +65,10 @@ import org.apache.nifi.util.StopWatch;
 @Tags({"hadoop", "HDFS", "get", "fetch", "ingest", "source", "filesystem"})
 @CapabilityDescription("Fetch files from Hadoop Distributed File System (HDFS) into FlowFiles")
 @WritesAttributes({
-        @WritesAttribute(attribute = "filename", description = "The name of the file that was read from HDFS."),
-        @WritesAttribute(attribute = "path", description = "The path is set to the relative path of the file's directory on HDFS. For example, if the Directory property is set to /tmp, then files picked up from /tmp will have the path attribute set to \"./\". If the Recurse Subdirectories property is set to true and a file is picked up from /tmp/abc/1/2/3, then the path attribute will be set to \"abc/1/2/3\".") })
+    @WritesAttribute(attribute = "filename", description = "The name of the file that was read from HDFS."),
+    @WritesAttribute(attribute = "path", description = "The path is set to the relative path of the file's directory on HDFS. For example, if "
+            + "the Directory property is set to /tmp, then files picked up from /tmp will have the path attribute set to \"./\". If the Recurse "
+            + "Subdirectories property is set to true and a file is picked up from /tmp/abc/1/2/3, then the path attribute will be set to \"abc/1/2/3\".")})
 @SeeAlso(PutHDFS.class)
 public class GetHDFS extends AbstractHadoopProcessor {
 
@@ -112,16 +114,16 @@ public class GetHDFS extends AbstractHadoopProcessor {
 
     public static final PropertyDescriptor FILE_FILTER_REGEX = new PropertyDescriptor.Builder()
             .name("File Filter Regex")
-            .description(
-                    "A Java Regular Expression for filtering Filenames; if a filter is supplied then only files whose names match that Regular Expression will be fetched, otherwise all files will be fetched")
+            .description("A Java Regular Expression for filtering Filenames; if a filter is supplied then only files whose names match that Regular "
+                    + "Expression will be fetched, otherwise all files will be fetched")
             .required(false)
             .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR)
             .build();
 
     public static final PropertyDescriptor FILTER_MATCH_NAME_ONLY = new PropertyDescriptor.Builder()
             .name("Filter Match Name Only")
-            .description(
-                    "If true then File Filter Regex will match on just the filename, otherwise subdirectory names will be included with filename in the regex comparison")
+            .description("If true then File Filter Regex will match on just the filename, otherwise subdirectory names will be included with filename "
+                    + "in the regex comparison")
             .required(true)
             .allowableValues("true", "false")
             .defaultValue("true")
@@ -137,21 +139,17 @@ public class GetHDFS extends AbstractHadoopProcessor {
 
     public static final PropertyDescriptor MIN_AGE = new PropertyDescriptor.Builder()
             .name("Minimum File Age")
-            .description(
-                    "The minimum age that a file must be in order to be pulled; any file younger than this amount of time (based on last modification date) will be ignored")
+            .description("The minimum age that a file must be in order to be pulled; any file younger than this amount of time (based on last modification date) will be ignored")
             .required(true)
-            .addValidator(
-                    StandardValidators.createTimePeriodValidator(0, TimeUnit.MILLISECONDS, Long.MAX_VALUE, TimeUnit.NANOSECONDS))
+            .addValidator(StandardValidators.createTimePeriodValidator(0, TimeUnit.MILLISECONDS, Long.MAX_VALUE, TimeUnit.NANOSECONDS))
             .defaultValue("0 sec")
             .build();
 
     public static final PropertyDescriptor MAX_AGE = new PropertyDescriptor.Builder()
             .name("Maximum File Age")
-            .description(
-                    "The maximum age that a file must be in order to be pulled; any file older than this amount of time (based on last modification date) will be ignored")
+            .description("The maximum age that a file must be in order to be pulled; any file older than this amount of time (based on last modification date) will be ignored")
             .required(false)
-            .addValidator(
-                    StandardValidators.createTimePeriodValidator(100, TimeUnit.MILLISECONDS, Long.MAX_VALUE, TimeUnit.NANOSECONDS))
+            .addValidator(StandardValidators.createTimePeriodValidator(100, TimeUnit.MILLISECONDS, Long.MAX_VALUE, TimeUnit.NANOSECONDS))
             .build();
 
     public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder()
@@ -389,11 +387,11 @@ public class GetHDFS extends AbstractHadoopProcessor {
     /**
      * Do a listing of HDFS if the POLLING_INTERVAL has lapsed.
      *
-     * Will return null if POLLING_INTERVAL has not lapsed. Will return an empty
-     * set if no files were found on HDFS that matched the configured filters.
-     * @param context
-     * @return 
-     * @throws java.io.IOException
+     * Will return null if POLLING_INTERVAL has not lapsed. Will return an empty set if no files were found on HDFS that matched the configured filters.
+     *
+     * @param context context
+     * @return null if POLLING_INTERVAL has not lapsed. Will return an empty set if no files were found on HDFS that matched the configured filters
+     * @throws java.io.IOException ex
      */
     protected Set<Path> performListing(final ProcessContext context) throws IOException {
 
@@ -417,11 +415,12 @@ public class GetHDFS extends AbstractHadoopProcessor {
 
     /**
      * Poll HDFS for files to process that match the configured file filters.
-     * @param hdfs
-     * @param dir
-     * @param filesVisited
-     * @return 
-     * @throws java.io.IOException 
+     *
+     * @param hdfs hdfs
+     * @param dir dir
+     * @param filesVisited filesVisited
+     * @return files to process
+     * @throws java.io.IOException ex
      */
     protected Set<Path> selectFiles(final FileSystem hdfs, final Path dir, Set<Path> filesVisited) throws IOException {
         if (null == filesVisited) {
@@ -465,11 +464,11 @@ public class GetHDFS extends AbstractHadoopProcessor {
     }
 
     /**
-     * Returns the relative path of the child that does not include the filename
-     * or the root path.
-     * @param root
-     * @param child
-     * @return 
+     * Returns the relative path of the child that does not include the filename or the root path.
+     *
+     * @param root root
+     * @param child child
+     * @return the relative path of the child that does not include the filename or the root path
      */
     public static String getPathDifference(final Path root, final Path child) {
         final int depthDiff = child.depth() - root.depth();
@@ -492,8 +491,7 @@ public class GetHDFS extends AbstractHadoopProcessor {
     }
 
     /**
-     * Holder for a snapshot in time of some processor properties that are
-     * passed around.
+     * Holder for a snapshot in time of some processor properties that are passed around.
      */
     protected static class ProcessorConfiguration {
 

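As an illustration of the path attribute behavior documented above (and of what getPathDifference computes), the following standalone sketch derives the relative directory of a picked-up file. It uses java.nio rather than Hadoop's Path purely for brevity, and the directory names are hypothetical.

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class PathDifferenceSketch {
        public static void main(final String[] args) {
            final Path root = Paths.get("/tmp");                    // the configured Directory
            final Path file = Paths.get("/tmp/abc/1/2/3/data.txt"); // hypothetical picked-up file

            final Path parent = file.getParent();                   // /tmp/abc/1/2/3
            final String pathAttribute = root.equals(parent)
                    ? "./"
                    : root.relativize(parent).toString();
            System.out.println(pathAttribute);                      // prints abc/1/2/3
        }
    }
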
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
index 88e725b..22ba36b 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
@@ -40,19 +40,13 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
 /**
- * This processor is used to pull files from HDFS. The files being pulled in
- * MUST be SequenceFile formatted files. The processor creates a flow file for
- * each key/value entry in the ingested SequenceFile. The created flow file's
- * content depends on the value of the optional configuration property FlowFile
- * Content. Currently, there are two choices: VALUE ONLY and KEY VALUE PAIR.
- * With the prior, only the SequenceFile value element is written to the flow
- * file contents. With the latter, the SequenceFile key and value are written to
- * the flow file contents as serialized objects; the format is key length (int),
- * key(String), value length(int), value(bytes). The default is VALUE ONLY.
+ * This processor is used to pull files from HDFS. The files being pulled in MUST be SequenceFile formatted files. The processor creates a flow file for each key/value entry in the ingested
+ * SequenceFile. The created flow file's content depends on the value of the optional configuration property FlowFile Content. Currently, there are two choices: VALUE ONLY and KEY VALUE PAIR. With the
+ * former, only the SequenceFile value element is written to the flow file contents. With the latter, the SequenceFile key and value are written to the flow file contents as serialized objects; the
+ * format is key length (int), key(String), value length(int), value(bytes). The default is VALUE ONLY.
  * <p>
- * NOTE: This processor loads the entire value entry into memory. While the size
- * limit for a value entry is 2GB, this will cause memory problems if there are
- * too many concurrent tasks and the data being ingested is large.
+ * NOTE: This processor loads the entire value entry into memory. While the size limit for a value entry is 2GB, this will cause memory problems if there are too many concurrent tasks and the data
+ * being ingested is large.
  *
  */
 @TriggerWhenEmpty

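The KEY VALUE PAIR layout mentioned above (key length as an int, key, value length as an int, value bytes) can be produced and parsed with plain java.io streams. The sketch below writes one such record and reads it back; the exact string encoding used by the processor is not shown in this diff, so the UTF-8 choice here is an assumption for illustration only.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public class KeyValuePairLayoutSketch {
        public static void main(final String[] args) throws IOException {
            final byte[] key = "part-00000".getBytes(StandardCharsets.UTF_8);
            final byte[] value = "hello sequence file".getBytes(StandardCharsets.UTF_8);

            // Write: key length (int), key, value length (int), value.
            final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(buffer)) {
                out.writeInt(key.length);
                out.write(key);
                out.writeInt(value.length);
                out.write(value);
            }

            // Read it back in the same order.
            try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
                final byte[] readKey = new byte[in.readInt()];
                in.readFully(readKey);
                final byte[] readValue = new byte[in.readInt()];
                in.readFully(readValue);
                System.out.println(new String(readKey, StandardCharsets.UTF_8) + " -> " + readValue.length + " bytes");
            }
        }
    }
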
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java
index 40ef5fa..38f2aae 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java
@@ -43,10 +43,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * This class reads a SequenceFile and generates FlowFiles, one per KeyValue
- * pair in the SequenceFile. The FlowFile name is based on the the incoming file
- * name with System nanotime appended; the FlowFile content is the key/value
- * pair serialized via Text.
+ * This class reads a SequenceFile and generates FlowFiles, one per KeyValue pair in the SequenceFile. The FlowFile name is based on the incoming file name with System nanotime appended; the
+ * FlowFile content is the key/value pair serialized via Text.
  */
 public class KeyValueReader implements SequenceFileReader<Set<FlowFile>> {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java
index e334582..a6f7005 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java
@@ -42,9 +42,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * This class reads a SequenceFile and generates FlowFiles, one per each
- * KeyValue Pair in the SequenceFile. The FlowFile name is the key, which is
- * typically a file name but may not be; the FlowFile content is the value.
+ * This class reads a SequenceFile and generates FlowFiles, one per KeyValue Pair in the SequenceFile. The FlowFile name is the key, which is typically a file name but may not be; the FlowFile
+ * content is the value.
  *
  */
 public class ValueReader implements SequenceFileReader<Set<FlowFile>> {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java
index 5cbcab6..58a30f5 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java
@@ -24,10 +24,8 @@ import java.util.Arrays;
 import java.util.List;
 
 /**
- * This class allows the user to define byte-array filters or single-byte
- * filters that will modify the content that is written to the underlying
- * stream. Each filter can be given a maximum number of replacements that it
- * should perform.
+ * This class allows the user to define byte-array filters or single-byte filters that will modify the content that is written to the underlying stream. Each filter can be given a maximum number of
+ * replacements that it should perform.
  */
 public class ByteFilteringOutputStream extends FilterOutputStream {
 
@@ -66,8 +64,7 @@ public class ByteFilteringOutputStream extends FilterOutputStream {
 
     /**
      * Causes this stream to write <tt>replaceWith</tt> in place of
-     * <tt>toReplace</tt> if {@link #write(byte[], int, int)} is called where
-     * the value to write is equal to
+     * <tt>toReplace</tt> if {@link #write(byte[], int, int)} is called where the value to write is equal to
      * <tt>toReplace</tt>.
      * <p/>
      * @param toReplace the byte array to replace
@@ -79,14 +76,12 @@ public class ByteFilteringOutputStream extends FilterOutputStream {
 
     /**
      * Causes this stream to write <tt>replaceWith</tt> in place of
-     * <tt>toReplace</tt> if {@link #write(byte[], int, int)} is called where
-     * the value to write is equal to
+     * <tt>toReplace</tt> if {@link #write(byte[], int, int)} is called where the value to write is equal to
      * <tt>toReplace</tt>.
      * <p/>
      * @param toReplace the byte array to replace
      * @param replaceWith the byte array to be substituted
-     * @param maxReplacements the maximum number of replacements that should be
-     * made
+     * @param maxReplacements the maximum number of replacements that should be made
      */
     public void addFilter(final byte[] toReplace, final byte[] replaceWith, final int maxReplacements) {
         multiByteFilters.add(new Filter(toReplace, replaceWith, maxReplacements));
@@ -94,8 +89,7 @@ public class ByteFilteringOutputStream extends FilterOutputStream {
 
     /**
      * Causes this stream to write <tt>replaceWith</tt> in place of
-     * <tt>toReplace</tt> if {@link #write(int)} is called where the value to
-     * write is equal to
+     * <tt>toReplace</tt> if {@link #write(int)} is called where the value to write is equal to
      * <tt>toReplace</tt>.
      * <p/>
      * @param toReplace the byte to replace
@@ -107,14 +101,12 @@ public class ByteFilteringOutputStream extends FilterOutputStream {
 
     /**
      * Causes this stream to write <tt>replaceWith</tt> in place of
-     * <tt>toReplace</tt> if {@link #write(int)} is called where the value to
-     * write is equal to
+     * <tt>toReplace</tt> if {@link #write(int)} is called where the value to write is equal to
      * <tt>toReplace</tt>.
      * <p/>
      * @param toReplace the byte to replace
      * @param replaceWith the byte to be substituted
-     * @param maxReplacements the maximum number of replacements that should be
-     * made
+     * @param maxReplacements the maximum number of replacements that should be made
      */
     public void addFilter(final byte toReplace, final byte replaceWith, final int maxReplacements) {
         singleByteFilters.add(new Filter(new byte[]{toReplace}, new byte[]{replaceWith}, maxReplacements));

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java
index ee09450..4cb2e8d 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java
@@ -24,10 +24,8 @@ import java.io.InputStream;
 import org.apache.hadoop.io.Writable;
 
 /**
- * Simple implementation of {@link Writable} that writes data from an
- * InputStream. This class will throw an
- * <tt>UnsupportedOperationException</tt> if {@link #readFields(DataInput)} is
- * called.
+ * Simple implementation of {@link Writable} that writes data from an InputStream. This class will throw an
+ * <tt>UnsupportedOperationException</tt> if {@link #readFields(DataInput)} is called.
  */
 public class InputStreamWritable implements Writable {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java
index 62fdc35..e5f29dd 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java
@@ -28,8 +28,7 @@ import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.Writable;
 
 /**
- * This class will write to an output stream, rather than an in-memory buffer,
- * the fields being read.
+ * This class will write to an output stream, rather than an in-memory buffer, the fields being read.
  *
  * @author unattributed
  *

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/10860944/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java
index 35703b1..851afd8 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java
@@ -25,15 +25,13 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
 public interface SequenceFileWriter {
 
     /**
-     * Creates a Sequence File by writing the given FlowFile as key/value pairs.
-     * The provided FlowFile may be a package of multiple FlowFiles, or just
-     * one. The keys for the Sequence File are the flow files' logical names.
-     * The values are the flow files' content.
+     * Creates a Sequence File by writing the given FlowFile as key/value pairs. The provided FlowFile may be a package of multiple FlowFiles, or just one. The keys for the Sequence File are the flow
+     * files' logical names. The values are the flow files' content.
      *
      * @param flowFile - the FlowFile to write to the Sequence File.
-     * @param session
-     * @param configuration
-     * @param compressionType
+     * @param session session
+     * @param configuration configuration
+     * @param compressionType compression type
      * @return the written to SequenceFile flow file
      */
     FlowFile writeSequenceFile(FlowFile flowFile, ProcessSession session, Configuration configuration, CompressionType compressionType);


[18/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/9a3b6bed
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/9a3b6bed
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/9a3b6bed

Branch: refs/heads/NIFI-292
Commit: 9a3b6bed62e9962ff97f2c76055cd4d49705ff89
Parents: 6a70645
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 13:43:35 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 13:43:35 2015 -0400

----------------------------------------------------------------------
 .../distributed/cache/client/CommsSession.java  |  16 +--
 .../DistributedMapCacheClientService.java       |   7 +-
 .../DistributedSetCacheClientService.java       |   6 +-
 .../cache/client/SSLCommsSession.java           |  25 ++--
 .../cache/client/StandardCommsSession.java      |   1 +
 .../additionalDetails.html                      |  60 +++++-----
 .../cache/server/AbstractCacheServer.java       |  25 ++--
 .../distributed/cache/server/CacheRecord.java   |  12 +-
 .../distributed/cache/server/CacheServer.java   |   3 +-
 .../cache/server/DistributedCacheServer.java    |   3 +-
 .../cache/server/DistributedSetCacheServer.java |  13 ++-
 .../cache/server/EvictionPolicy.java            |  24 ++--
 .../cache/server/SetCacheServer.java            |  25 ++--
 .../server/map/DistributedMapCacheServer.java   |  12 +-
 .../distributed/cache/server/map/MapCache.java  |   4 +
 .../cache/server/map/MapCacheRecord.java        |  19 ++--
 .../cache/server/map/MapCacheServer.java        | 113 ++++++++++---------
 .../cache/server/map/MapPutResult.java          |   5 +-
 .../cache/server/map/PersistentMapCache.java    |  51 ++++-----
 .../cache/server/map/SimpleMapCache.java        |  47 ++++----
 .../cache/server/set/PersistentSetCache.java    |  57 +++++-----
 .../distributed/cache/server/set/SetCache.java  |   5 +-
 .../cache/server/set/SetCacheRecord.java        |  15 +--
 .../cache/server/set/SetCacheResult.java        |  11 +-
 .../cache/server/set/SimpleSetCache.java        |  41 +++----
 .../additionalDetails.html                      |  62 +++++-----
 .../cache/server/TestServerAndClient.java       |   9 +-
 .../nifi-http-context-map-api/pom.xml           |  34 +++---
 .../org/apache/nifi/http/HttpContextMap.java    |  45 ++++----
 .../nifi-http-context-map/pom.xml               |  20 ++--
 .../nifi/http/StandardHttpContextMap.java       |  83 +++++++-------
 .../index.html                                  |  36 +++---
 .../nifi/ssl/StandardSSLContextService.java     |   3 +-
 .../apache/nifi/ssl/SSLContextServiceTest.java  |   4 +-
 34 files changed, 461 insertions(+), 435 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/CommsSession.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/CommsSession.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/CommsSession.java
index f838c2f..c035485 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/CommsSession.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/CommsSession.java
@@ -27,20 +27,20 @@ import javax.net.ssl.SSLContext;
 public interface CommsSession extends Closeable {
 
     void setTimeout(final long value, final TimeUnit timeUnit);
-    
+
     InputStream getInputStream() throws IOException;
-    
+
     OutputStream getOutputStream() throws IOException;
-    
+
     boolean isClosed();
-    
+
     void interrupt();
-    
+
     String getHostname();
-    
+
     int getPort();
-    
+
     long getTimeout(TimeUnit timeUnit);
-    
+
     SSLContext getSSLContext();
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedMapCacheClientService.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedMapCacheClientService.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedMapCacheClientService.java
index 92bda8f..51138b9 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedMapCacheClientService.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedMapCacheClientService.java
@@ -42,7 +42,7 @@ import org.apache.nifi.stream.io.DataOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-@SeeAlso(classNames={"org.apache.nifi.distributed.cache.server.map.DistributedMapCacheServer", "org.apache.nifi.ssl.StandardSSLContextService"})
+@SeeAlso(classNames = {"org.apache.nifi.distributed.cache.server.map.DistributedMapCacheServer", "org.apache.nifi.ssl.StandardSSLContextService"})
 @CapabilityDescription("Provides the ability to communicate with a DistributedMapCacheServer. This can be used in order to share a Map "
         + "between nodes in a NiFi cluster")
 public class DistributedMapCacheClientService extends AbstractControllerService implements DistributedMapCacheClient {
@@ -65,14 +65,14 @@ public class DistributedMapCacheClientService extends AbstractControllerService
     public static final PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
             .name("SSL Context Service")
             .description("If specified, indicates the SSL Context Service that is used to communicate with the "
-            		+ "remote server. If not specified, communications will not be encrypted")
+                    + "remote server. If not specified, communications will not be encrypted")
             .required(false)
             .identifiesControllerService(SSLContextService.class)
             .build();
     public static final PropertyDescriptor COMMUNICATIONS_TIMEOUT = new PropertyDescriptor.Builder()
             .name("Communications Timeout")
             .description("Specifies how long to wait when communicating with the remote server before determining that "
-            		+ "there is a communications failure if data cannot be sent or received")
+                    + "there is a communications failure if data cannot be sent or received")
             .required(true)
             .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
             .defaultValue("30 secs")
@@ -299,6 +299,7 @@ public class DistributedMapCacheClientService extends AbstractControllerService
     }
 
     private static interface CommsAction<T> {
+
         T execute(CommsSession commsSession) throws IOException;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedSetCacheClientService.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedSetCacheClientService.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedSetCacheClientService.java
index 2de4ccb..63d59ca 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedSetCacheClientService.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/DistributedSetCacheClientService.java
@@ -42,7 +42,7 @@ import org.apache.nifi.stream.io.DataOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-@SeeAlso(classNames={"org.apache.nifi.distributed.cache.server.DistributedSetCacheServer", "org.apache.nifi.ssl.StandardSSLContextService"})
+@SeeAlso(classNames = {"org.apache.nifi.distributed.cache.server.DistributedSetCacheServer", "org.apache.nifi.ssl.StandardSSLContextService"})
 @CapabilityDescription("Provides the ability to communicate with a DistributedSetCacheServer. This can be used in order to share a Set "
         + "between nodes in a NiFi cluster")
 public class DistributedSetCacheClientService extends AbstractControllerService implements DistributedSetCacheClient {
@@ -65,14 +65,14 @@ public class DistributedSetCacheClientService extends AbstractControllerService
     public static final PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
             .name("SSL Context Service")
             .description("If specified, indicates the SSL Context Service that is used to communicate with the "
-            		+ "remote server. If not specified, communications will not be encrypted")
+                    + "remote server. If not specified, communications will not be encrypted")
             .required(false)
             .identifiesControllerService(SSLContextService.class)
             .build();
     public static final PropertyDescriptor COMMUNICATIONS_TIMEOUT = new PropertyDescriptor.Builder()
             .name("Communications Timeout")
             .description("Specifices how long to wait when communicating with the remote server before determining "
-            		+ "that there is a communications failure if data cannot be sent or received")
+                    + "that there is a communications failure if data cannot be sent or received")
             .required(true)
             .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
             .defaultValue("30 secs")

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/SSLCommsSession.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/SSLCommsSession.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/SSLCommsSession.java
index 9b4b656..3d400bb 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/SSLCommsSession.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/SSLCommsSession.java
@@ -30,36 +30,37 @@ import org.apache.nifi.remote.io.socket.ssl.SSLSocketChannelInputStream;
 import org.apache.nifi.remote.io.socket.ssl.SSLSocketChannelOutputStream;
 
 public class SSLCommsSession implements CommsSession {
+
     private final SSLSocketChannel sslSocketChannel;
     private final SSLContext sslContext;
     private final String hostname;
     private final int port;
-    
+
     private final SSLSocketChannelInputStream in;
     private final BufferedInputStream bufferedIn;
-    
+
     private final SSLSocketChannelOutputStream out;
     private final BufferedOutputStream bufferedOut;
 
-    public SSLCommsSession(final SSLContext sslContext, final String hostname, final int port) throws IOException { 
+    public SSLCommsSession(final SSLContext sslContext, final String hostname, final int port) throws IOException {
         sslSocketChannel = new SSLSocketChannel(sslContext, hostname, port, true);
-        
+
         in = new SSLSocketChannelInputStream(sslSocketChannel);
         bufferedIn = new BufferedInputStream(in);
-        
+
         out = new SSLSocketChannelOutputStream(sslSocketChannel);
         bufferedOut = new BufferedOutputStream(out);
-        
+
         this.sslContext = sslContext;
         this.hostname = hostname;
         this.port = port;
     }
-    
+
     @Override
     public void interrupt() {
         sslSocketChannel.interrupt();
     }
-    
+
     @Override
     public void close() throws IOException {
         sslSocketChannel.close();
@@ -84,23 +85,25 @@ public class SSLCommsSession implements CommsSession {
     public boolean isClosed() {
         return sslSocketChannel.isClosed();
     }
-    
+
     @Override
     public String getHostname() {
         return hostname;
     }
-    
+
     @Override
     public int getPort() {
         return port;
     }
+
     @Override
     public SSLContext getSSLContext() {
         return sslContext;
     }
+
     @Override
     public long getTimeout(final TimeUnit timeUnit) {
         return timeUnit.convert(sslSocketChannel.getTimeout(), TimeUnit.MILLISECONDS);
     }
-    
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/StandardCommsSession.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/StandardCommsSession.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/StandardCommsSession.java
index 1f1ff7e..b2a5c1d 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/StandardCommsSession.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/java/org/apache/nifi/distributed/cache/client/StandardCommsSession.java
@@ -33,6 +33,7 @@ import org.apache.nifi.remote.io.socket.SocketChannelInputStream;
 import org.apache.nifi.remote.io.socket.SocketChannelOutputStream;
 
 public class StandardCommsSession implements CommsSession {
+
     private final SocketChannel socketChannel;
     private final String hostname;
     private final int port;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/resources/docs/org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService/additionalDetails.html
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/resources/docs/org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService/additionalDetails.html b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/resources/docs/org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService/additionalDetails.html
index 4cde8c6..1568635 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/resources/docs/org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService/additionalDetails.html
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-client-service/src/main/resources/docs/org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService/additionalDetails.html
@@ -1,35 +1,35 @@
 <!DOCTYPE html>
 <html lang="en">
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<head>
-<meta charset="utf-8" />
-<title>Distributed Map Cache Client Service</title>
-<link rel="stylesheet" href="../../css/component-usage.css" type="text/css" />
-</head>
+    <!--
+      Licensed to the Apache Software Foundation (ASF) under one or more
+      contributor license agreements.  See the NOTICE file distributed with
+      this work for additional information regarding copyright ownership.
+      The ASF licenses this file to You under the Apache License, Version 2.0
+      (the "License"); you may not use this file except in compliance with
+      the License.  You may obtain a copy of the License at
+          http://www.apache.org/licenses/LICENSE-2.0
+      Unless required by applicable law or agreed to in writing, software
+      distributed under the License is distributed on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+      See the License for the specific language governing permissions and
+      limitations under the License.
+    -->
+    <head>
+        <meta charset="utf-8" />
+        <title>Distributed Map Cache Client Service</title>
+        <link rel="stylesheet" href="../../css/component-usage.css" type="text/css" />
+    </head>
 
-<body>
-	<p>
-		Below is an example of how to create a client connection to your distributed map cache server. 
-		Note that the identifier in this example is <code>cache-client</code>. If you are using this template
-		to create your own MapCacheClient service, replace the values in this template with values that are
-		suitable for your system. Possible options for <code>Server Hostname</code>, <code>Server Port</code>,
-		<code>Communications Timeout</code>, and <span style="font-style: italic;">SSL Context Service</span>.
-	</p>
+    <body>
+        <p>
+            Below is an example of how to create a client connection to your distributed map cache server. 
+            Note that the identifier in this example is <code>cache-client</code>. If you are using this template
+            to create your own MapCacheClient service, replace the values in this template with values that are
+            suitable for your system. Possible options for <code>Server Hostname</code>, <code>Server Port</code>,
+            <code>Communications Timeout</code>, and <span style="font-style: italic;">SSL Context Service</span>.
+        </p>
 
-	<pre>
+        <pre>
 &lt;?xml version="1.0" encoding="UTF-8" ?&gt;
 &lt;services&gt;
     &lt;service&gt;
@@ -40,6 +40,6 @@
         &lt;property name="Communications Timeout"&gt;30 secs&lt;/property&gt;
     &lt;/service&gt;
 &lt;/services&gt;
-	</pre>
-</body>
+        </pre>
+    </body>
 </html>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/AbstractCacheServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/AbstractCacheServer.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/AbstractCacheServer.java
index a6a2458..10f53b2 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/AbstractCacheServer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/AbstractCacheServer.java
@@ -51,7 +51,8 @@ public abstract class AbstractCacheServer implements CacheServer {
     private final int port;
     private final SSLContext sslContext;
     protected volatile boolean stopped = false;
-    private final Set<Thread> processInputThreads = new CopyOnWriteArraySet<>();;
+    private final Set<Thread> processInputThreads = new CopyOnWriteArraySet<>();
+    ;
 
     private volatile ServerSocketChannel serverSocketChannel;
 
@@ -75,7 +76,7 @@ public abstract class AbstractCacheServer implements CacheServer {
                     final SocketChannel socketChannel;
                     try {
                         socketChannel = serverSocketChannel.accept();
-                        logger.debug("Connected to {}", new Object[] { socketChannel });
+                        logger.debug("Connected to {}", new Object[]{socketChannel});
                     } catch (final IOException e) {
                         if (!stopped) {
                             logger.error("{} unable to accept connection from remote peer due to {}", this, e.toString());
@@ -104,7 +105,7 @@ public abstract class AbstractCacheServer implements CacheServer {
                                     rawOutputStream = new SSLSocketChannelOutputStream(sslSocketChannel);
                                 }
                             } catch (IOException e) {
-                                logger.error("Cannot create input and/or output streams for {}", new Object[] { identifier }, e);
+                                logger.error("Cannot create input and/or output streams for {}", new Object[]{identifier}, e);
                                 if (logger.isDebugEnabled()) {
                                     logger.error("", e);
                                 }
@@ -112,7 +113,7 @@ public abstract class AbstractCacheServer implements CacheServer {
                                     socketChannel.close();
                                 } catch (IOException swallow) {
                                 }
-                               
+
                                 return;
                             }
                             try (final InputStream in = new BufferedInputStream(rawInputStream);
@@ -127,12 +128,12 @@ public abstract class AbstractCacheServer implements CacheServer {
                                     continueComms = listen(in, out, versionNegotiator.getVersion());
                                 }
                                 // client has issued 'close'
-                                logger.debug("Client issued close on {}", new Object[] { socketChannel });
+                                logger.debug("Client issued close on {}", new Object[]{socketChannel});
                             } catch (final SocketTimeoutException e) {
                                 logger.debug("30 sec timeout reached", e);
                             } catch (final IOException | HandshakeException e) {
                                 if (!stopped) {
-                                    logger.error("{} unable to communicate with remote peer {} due to {}", new Object[] { this, peer, e.toString() });
+                                    logger.error("{} unable to communicate with remote peer {} due to {}", new Object[]{this, peer, e.toString()});
                                     if (logger.isDebugEnabled()) {
                                         logger.error("", e);
                                     }
@@ -161,7 +162,7 @@ public abstract class AbstractCacheServer implements CacheServer {
     @Override
     public void stop() throws IOException {
         stopped = true;
-        logger.info("Stopping CacheServer {}", new Object[] { this.identifier });
+        logger.info("Stopping CacheServer {}", new Object[]{this.identifier});
 
         if (serverSocketChannel != null && serverSocketChannel.isOpen()) {
             serverSocketChannel.close();
@@ -188,12 +189,12 @@ public abstract class AbstractCacheServer implements CacheServer {
 
     /**
      * Listens for incoming data and communicates with remote peer
-     * 
-     * @param in
-     * @param out
-     * @param version
+     *
+     * @param in in
+     * @param out out
+     * @param version version
      * @return <code>true</code> if communications should continue, <code>false</code> otherwise
-     * @throws IOException
+     * @throws IOException ex
      */
     protected abstract boolean listen(InputStream in, OutputStream out, int version) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheRecord.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheRecord.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheRecord.java
index 71ac56d..d7604cd 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheRecord.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheRecord.java
@@ -22,26 +22,26 @@ import java.util.concurrent.atomic.AtomicLong;
 public class CacheRecord {
 
     private static final AtomicLong idGenerator = new AtomicLong(0L);
-    
+
     private final long id;
     private final long entryDate;
     private volatile long lastHitDate;
     private final AtomicInteger hitCount = new AtomicInteger(0);
-    
+
     public CacheRecord() {
         entryDate = System.currentTimeMillis();
         lastHitDate = entryDate;
         id = idGenerator.getAndIncrement();
     }
-    
+
     public long getEntryDate() {
         return entryDate;
     }
-    
+
     public long getLastHitDate() {
         return lastHitDate;
     }
-    
+
     public int getHitCount() {
         return hitCount.get();
     }
@@ -50,7 +50,7 @@ public class CacheRecord {
         hitCount.getAndIncrement();
         lastHitDate = System.currentTimeMillis();
     }
-    
+
     public long getId() {
         return id;
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheServer.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheServer.java
index 2c85cd8..fab8f13 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheServer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/CacheServer.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 public interface CacheServer {
 
     void start() throws IOException;
+
     void stop() throws IOException;
-    
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedCacheServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedCacheServer.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedCacheServer.java
index f2e848f..5907f50 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedCacheServer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedCacheServer.java
@@ -29,6 +29,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.ssl.SSLContextService;
 
 public abstract class DistributedCacheServer extends AbstractControllerService {
+
     public static final String EVICTION_STRATEGY_LFU = "Least Frequently Used";
     public static final String EVICTION_STRATEGY_LRU = "Least Recently Used";
     public static final String EVICTION_STRATEGY_FIFO = "First In, First Out";
@@ -43,7 +44,7 @@ public abstract class DistributedCacheServer extends AbstractControllerService {
     public static final PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
             .name("SSL Context Service")
             .description("If specified, this service will be used to create an SSL Context that will be used "
-            		+ "to secure communications; if not specified, communications will not be secure")
+                    + "to secure communications; if not specified, communications will not be secure")
             .required(false)
             .identifiesControllerService(SSLContextService.class)
             .build();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedSetCacheServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedSetCacheServer.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedSetCacheServer.java
index 70e86c4..799baa3 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedSetCacheServer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/DistributedSetCacheServer.java
@@ -25,6 +25,7 @@ import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.ssl.SSLContextService;
 import org.apache.nifi.ssl.SSLContextService.ClientAuth;
+
 @Tags({"distributed", "set", "distinct", "cache", "server"})
 @CapabilityDescription("Provides a set (collection of unique values) cache that can be accessed over a socket. "
         + "Interaction with this service is typically accomplished via a DistributedSetCacheClient service.")
@@ -37,14 +38,14 @@ public class DistributedSetCacheServer extends DistributedCacheServer {
         final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
         final int maxSize = context.getProperty(MAX_CACHE_ENTRIES).asInteger();
         final String evictionPolicyName = context.getProperty(EVICTION_POLICY).getValue();
-        
+
         final SSLContext sslContext;
-        if ( sslContextService == null ) {
+        if (sslContextService == null) {
             sslContext = null;
         } else {
             sslContext = sslContextService.createSSLContext(ClientAuth.REQUIRED);
         }
-        
+
         final EvictionPolicy evictionPolicy;
         switch (evictionPolicyName) {
             case EVICTION_STRATEGY_FIFO:
@@ -59,14 +60,14 @@ public class DistributedSetCacheServer extends DistributedCacheServer {
             default:
                 throw new IllegalArgumentException("Illegal Eviction Policy: " + evictionPolicyName);
         }
-        
+
         try {
             final File persistenceDir = persistencePath == null ? null : new File(persistencePath);
-            
+
             return new SetCacheServer(getIdentifier(), sslContext, port, maxSize, evictionPolicy, persistenceDir);
         } catch (final Exception e) {
             throw new RuntimeException(e);
         }
     }
-    
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/EvictionPolicy.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/EvictionPolicy.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/EvictionPolicy.java
index 60bd2c1..e6d577d 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/EvictionPolicy.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/EvictionPolicy.java
@@ -19,37 +19,40 @@ package org.apache.nifi.distributed.cache.server;
 import java.util.Comparator;
 
 public enum EvictionPolicy {
+
     LFU(new LFUComparator()),
     LRU(new LRUComparator()),
     FIFO(new FIFOComparator());
-    
+
     private final Comparator<CacheRecord> comparator;
-    
+
     private EvictionPolicy(final Comparator<CacheRecord> comparator) {
         this.comparator = comparator;
     }
-    
+
     public Comparator<CacheRecord> getComparator() {
         return comparator;
     }
-    
+
     public static class LFUComparator implements Comparator<CacheRecord> {
+
         @Override
         public int compare(final CacheRecord o1, final CacheRecord o2) {
-            if ( o1.equals(o2) ) {
+            if (o1.equals(o2)) {
                 return 0;
             }
-            
+
             final int hitCountComparison = Integer.compare(o1.getHitCount(), o2.getHitCount());
             final int entryDateComparison = (hitCountComparison == 0) ? Long.compare(o1.getEntryDate(), o2.getEntryDate()) : hitCountComparison;
             return (entryDateComparison == 0 ? Long.compare(o1.getId(), o2.getId()) : entryDateComparison);
         }
     }
-    
+
     public static class LRUComparator implements Comparator<CacheRecord> {
+
         @Override
         public int compare(final CacheRecord o1, final CacheRecord o2) {
-            if ( o1.equals(o2) ) {
+            if (o1.equals(o2)) {
                 return 0;
             }
 
@@ -57,11 +60,12 @@ public enum EvictionPolicy {
             return (lastHitDateComparison == 0 ? Long.compare(o1.getId(), o2.getId()) : lastHitDateComparison);
         }
     }
-    
+
     public static class FIFOComparator implements Comparator<CacheRecord> {
+
         @Override
         public int compare(final CacheRecord o1, final CacheRecord o2) {
-            if ( o1.equals(o2) ) {
+            if (o1.equals(o2)) {
                 return 0;
             }
 

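The three comparators above exist so that the caches later in this patch can keep an inverse SortedMap ordered from least to most valuable record and evict firstKey() when the cache is full. A minimal, self-contained sketch of that ordering follows; DemoRecord and LfuOrderingDemo are made-up names used only for illustration, not part of the patch:

    import java.util.Comparator;
    import java.util.TreeMap;

    // Stripped-down stand-in for CacheRecord with the three fields the
    // LFUComparator above consults: hit count, entry date, id.
    class DemoRecord {
        final long id;
        final long entryDate;
        int hitCount;
        DemoRecord(long id, long entryDate) { this.id = id; this.entryDate = entryDate; }
    }

    public class LfuOrderingDemo {
        public static void main(String[] args) {
            // Fewest hits first; ties broken by older entry date, then lower id,
            // mirroring the LFUComparator logic above.
            Comparator<DemoRecord> lfu = (a, b) -> {
                int byHits = Integer.compare(a.hitCount, b.hitCount);
                if (byHits != 0) return byHits;
                int byDate = Long.compare(a.entryDate, b.entryDate);
                return byDate != 0 ? byDate : Long.compare(a.id, b.id);
            };

            TreeMap<DemoRecord, String> inverse = new TreeMap<>(lfu);
            DemoRecord hot = new DemoRecord(1, 100); hot.hitCount = 5;
            DemoRecord cold = new DemoRecord(2, 200); // never hit
            inverse.put(hot, "hot");
            inverse.put(cold, "cold");

            // firstKey() is the eviction victim, the same call SimpleMapCache
            // and SimpleSetCache make in their evict() methods.
            System.out.println(inverse.get(inverse.firstKey())); // prints "cold"
        }
    }

SimpleSetCache keeps its inverse map in a TreeMap and SimpleMapCache in a ConcurrentSkipListMap, but both pick the eviction victim with the same firstKey() call.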
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/SetCacheServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/SetCacheServer.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/SetCacheServer.java
index d0abe5c..3dd224b 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/SetCacheServer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/SetCacheServer.java
@@ -67,17 +67,17 @@ public class SetCacheServer extends AbstractCacheServer {
 
         final SetCacheResult response;
         switch (action) {
-        case "addIfAbsent":
-            response = cache.addIfAbsent(valueBuffer);
-            break;
-        case "contains":
-            response = cache.contains(valueBuffer);
-            break;
-        case "remove":
-            response = cache.remove(valueBuffer);
-            break;
-        default:
-            throw new IOException("IllegalRequest");
+            case "addIfAbsent":
+                response = cache.addIfAbsent(valueBuffer);
+                break;
+            case "contains":
+                response = cache.contains(valueBuffer);
+                break;
+            case "remove":
+                response = cache.remove(valueBuffer);
+                break;
+            default:
+                throw new IOException("IllegalRequest");
         }
 
         dos.writeBoolean(response.getResult());
@@ -97,8 +97,9 @@ public class SetCacheServer extends AbstractCacheServer {
 
     @Override
     protected void finalize() throws Throwable {
-        if (!stopped)
+        if (!stopped) {
             stop();
+        }
     }
 
 }
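For orientation, the request handling above boils down to: read an action name, read a value, apply it to the SetCache, and write back a single boolean. A hypothetical client-side exchange might look like the sketch below; the host, port, length-prefixed value framing, and use of writeUTF for the action name are assumptions inferred from the map-cache variant later in this patch, and any handshake the real DistributedSetCacheClient service performs before issuing requests is omitted.

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    public class SetCacheClientSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical exchange; host and port are placeholders.
            try (Socket socket = new Socket("localhost", 4557);
                 DataOutputStream out = new DataOutputStream(socket.getOutputStream());
                 DataInputStream in = new DataInputStream(socket.getInputStream())) {

                byte[] value = "some-value".getBytes(StandardCharsets.UTF_8);
                out.writeUTF("addIfAbsent");  // action name matched by the switch above
                out.writeInt(value.length);   // assumed length-prefixed value framing
                out.write(value);
                out.flush();

                // The server answers every set-cache action with a single boolean,
                // written as dos.writeBoolean(response.getResult()).
                System.out.println("added = " + in.readBoolean());
            }
        }
    }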

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheServer.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheServer.java
index 0594dd4..dce7ccd 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheServer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheServer.java
@@ -33,7 +33,7 @@ import org.apache.nifi.ssl.SSLContextService.ClientAuth;
 @Tags({"distributed", "cluster", "map", "cache", "server", "key/value"})
 @CapabilityDescription("Provides a map (key/value) cache that can be accessed over a socket. Interaction with this service"
         + " is typically accomplished via a DistributedMapCacheClient service.")
-@SeeAlso(classNames={"org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService", "org.apache.nifi.ssl.StandardSSLContextService"})
+@SeeAlso(classNames = {"org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService", "org.apache.nifi.ssl.StandardSSLContextService"})
 public class DistributedMapCacheServer extends DistributedCacheServer {
 
     @Override
@@ -43,14 +43,14 @@ public class DistributedMapCacheServer extends DistributedCacheServer {
         final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
         final int maxSize = context.getProperty(MAX_CACHE_ENTRIES).asInteger();
         final String evictionPolicyName = context.getProperty(EVICTION_POLICY).getValue();
-        
+
         final SSLContext sslContext;
-        if ( sslContextService == null ) {
+        if (sslContextService == null) {
             sslContext = null;
         } else {
             sslContext = sslContextService.createSSLContext(ClientAuth.REQUIRED);
         }
-        
+
         final EvictionPolicy evictionPolicy;
         switch (evictionPolicyName) {
             case EVICTION_STRATEGY_FIFO:
@@ -65,10 +65,10 @@ public class DistributedMapCacheServer extends DistributedCacheServer {
             default:
                 throw new IllegalArgumentException("Illegal Eviction Policy: " + evictionPolicyName);
         }
-        
+
         try {
             final File persistenceDir = persistencePath == null ? null : new File(persistencePath);
-            
+
             return new MapCacheServer(getIdentifier(), sslContext, port, maxSize, evictionPolicy, persistenceDir);
         } catch (final Exception e) {
             throw new RuntimeException(e);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCache.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCache.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCache.java
index 534cb0b..fad0adb 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCache.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCache.java
@@ -22,8 +22,12 @@ import java.nio.ByteBuffer;
 public interface MapCache {
 
     MapPutResult putIfAbsent(ByteBuffer key, ByteBuffer value) throws IOException;
+
     boolean containsKey(ByteBuffer key) throws IOException;
+
     ByteBuffer get(ByteBuffer key) throws IOException;
+
     ByteBuffer remove(ByteBuffer key) throws IOException;
+
     void shutdown() throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheRecord.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheRecord.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheRecord.java
index b0ab0c4..ff032b1 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheRecord.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheRecord.java
@@ -21,38 +21,39 @@ import java.nio.ByteBuffer;
 import org.apache.nifi.distributed.cache.server.CacheRecord;
 
 public class MapCacheRecord extends CacheRecord {
+
     private final ByteBuffer key;
     private final ByteBuffer value;
-    
+
     public MapCacheRecord(final ByteBuffer key, final ByteBuffer value) {
         this.key = key;
         this.value = value;
     }
-    
+
     public ByteBuffer getKey() {
         return key;
     }
-    
+
     public ByteBuffer getValue() {
         return value;
     }
-    
+
     @Override
     public int hashCode() {
         return 2938476 + key.hashCode() * value.hashCode();
     }
-    
+
     @Override
     public boolean equals(final Object obj) {
-        if ( obj == this ) {
+        if (obj == this) {
             return true;
         }
-        
-        if ( obj instanceof MapCacheRecord ) {
+
+        if (obj instanceof MapCacheRecord) {
             final MapCacheRecord that = ((MapCacheRecord) obj);
             return key.equals(that.key) && value.equals(that.value);
         }
-        
+
         return false;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheServer.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheServer.java
index e4a600e..943d6aa 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheServer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapCacheServer.java
@@ -55,63 +55,63 @@ public class MapCacheServer extends AbstractCacheServer {
         final String action = dis.readUTF();
         try {
             switch (action) {
-            case "close": {
-                return false;
-            }
-            case "putIfAbsent": {
-                final byte[] key = readValue(dis);
-                final byte[] value = readValue(dis);
-                final MapPutResult putResult = cache.putIfAbsent(ByteBuffer.wrap(key), ByteBuffer.wrap(value));
-                dos.writeBoolean(putResult.isSuccessful());
-                break;
-            }
-            case "containsKey": {
-                final byte[] key = readValue(dis);
-                final boolean contains = cache.containsKey(ByteBuffer.wrap(key));
-                dos.writeBoolean(contains);
-                break;
-            }
-            case "getAndPutIfAbsent": {
-                final byte[] key = readValue(dis);
-                final byte[] value = readValue(dis);
-
-                final MapPutResult putResult = cache.putIfAbsent(ByteBuffer.wrap(key), ByteBuffer.wrap(value));
-                if (putResult.isSuccessful()) {
-                    // Put was successful. There was no old value to get.
-                    dos.writeInt(0);
-                } else {
-                    // we didn't put. Write back the previous value
-                    final byte[] byteArray = putResult.getExistingValue().array();
-                    dos.writeInt(byteArray.length);
-                    dos.write(byteArray);
+                case "close": {
+                    return false;
                 }
-
-                break;
-            }
-            case "get": {
-                final byte[] key = readValue(dis);
-                final ByteBuffer existingValue = cache.get(ByteBuffer.wrap(key));
-                if (existingValue == null) {
-                    // there was no existing value; we did a "put".
-                    dos.writeInt(0);
-                } else {
-                    // a value already existed. we did not update the map
-                    final byte[] byteArray = existingValue.array();
-                    dos.writeInt(byteArray.length);
-                    dos.write(byteArray);
+                case "putIfAbsent": {
+                    final byte[] key = readValue(dis);
+                    final byte[] value = readValue(dis);
+                    final MapPutResult putResult = cache.putIfAbsent(ByteBuffer.wrap(key), ByteBuffer.wrap(value));
+                    dos.writeBoolean(putResult.isSuccessful());
+                    break;
+                }
+                case "containsKey": {
+                    final byte[] key = readValue(dis);
+                    final boolean contains = cache.containsKey(ByteBuffer.wrap(key));
+                    dos.writeBoolean(contains);
+                    break;
+                }
+                case "getAndPutIfAbsent": {
+                    final byte[] key = readValue(dis);
+                    final byte[] value = readValue(dis);
+
+                    final MapPutResult putResult = cache.putIfAbsent(ByteBuffer.wrap(key), ByteBuffer.wrap(value));
+                    if (putResult.isSuccessful()) {
+                        // Put was successful. There was no old value to get.
+                        dos.writeInt(0);
+                    } else {
+                        // we didn't put. Write back the previous value
+                        final byte[] byteArray = putResult.getExistingValue().array();
+                        dos.writeInt(byteArray.length);
+                        dos.write(byteArray);
+                    }
+
+                    break;
+                }
+                case "get": {
+                    final byte[] key = readValue(dis);
+                    final ByteBuffer existingValue = cache.get(ByteBuffer.wrap(key));
+                    if (existingValue == null) {
+                        // there was no existing value; we did a "put".
+                        dos.writeInt(0);
+                    } else {
+                        // a value already existed. we did not update the map
+                        final byte[] byteArray = existingValue.array();
+                        dos.writeInt(byteArray.length);
+                        dos.write(byteArray);
+                    }
+
+                    break;
+                }
+                case "remove": {
+                    final byte[] key = readValue(dis);
+                    final boolean removed = cache.remove(ByteBuffer.wrap(key)) != null;
+                    dos.writeBoolean(removed);
+                    break;
+                }
+                default: {
+                    throw new IOException("Illegal Request");
                 }
-
-                break;
-            }
-            case "remove": {
-                final byte[] key = readValue(dis);
-                final boolean removed = cache.remove(ByteBuffer.wrap(key)) != null;
-                dos.writeBoolean(removed);
-                break;
-            }
-            default: {
-                throw new IOException("Illegal Request");
-            }
             }
         } finally {
             dos.flush();
@@ -131,8 +131,9 @@ public class MapCacheServer extends AbstractCacheServer {
 
     @Override
     protected void finalize() throws Throwable {
-        if (!stopped)
+        if (!stopped) {
             stop();
+        }
     }
 
     private byte[] readValue(final DataInputStream dis) throws IOException {

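The "getAndPutIfAbsent" and "get" branches above encode their responses the same way: an int of 0 when there is no existing value to return, otherwise the existing value's length followed by its bytes. A small, hypothetical decoding helper for the client side (the class and method names are made up, and the real client may wrap this in additional protocol layers):

    import java.io.DataInputStream;
    import java.io.IOException;

    public class GetAndPutResponseSketch {
        // Decodes the response written by the "getAndPutIfAbsent" and "get" branches:
        // a length of 0 means there was no existing value; otherwise the value follows.
        static byte[] readExistingValue(DataInputStream in) throws IOException {
            int length = in.readInt();
            if (length == 0) {
                return null;     // nothing pre-existing (for getAndPutIfAbsent: the put succeeded)
            }
            byte[] existing = new byte[length];
            in.readFully(existing);
            return existing;     // a value already existed; the map was not modified
        }
    }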
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapPutResult.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapPutResult.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapPutResult.java
index 29695eb..d0055f3 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapPutResult.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/MapPutResult.java
@@ -19,11 +19,12 @@ package org.apache.nifi.distributed.cache.server.map;
 import java.nio.ByteBuffer;
 
 public class MapPutResult {
+
     private final boolean successful;
     private final ByteBuffer key, value;
     private final ByteBuffer existingValue;
     private final ByteBuffer evictedKey, evictedValue;
-    
+
     public MapPutResult(final boolean successful, final ByteBuffer key, final ByteBuffer value, final ByteBuffer existingValue, final ByteBuffer evictedKey, final ByteBuffer evictedValue) {
         this.successful = successful;
         this.key = key;
@@ -44,7 +45,7 @@ public class MapPutResult {
     public ByteBuffer getValue() {
         return value;
     }
-    
+
     public ByteBuffer getExistingValue() {
         return existingValue;
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/PersistentMapCache.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/PersistentMapCache.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/PersistentMapCache.java
index 77fb77d..e821fbf 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/PersistentMapCache.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/PersistentMapCache.java
@@ -38,9 +38,9 @@ public class PersistentMapCache implements MapCache {
 
     private final MapCache wrapped;
     private final WriteAheadRepository<MapWaliRecord> wali;
-    
+
     private final AtomicLong modifications = new AtomicLong(0L);
-    
+
     public PersistentMapCache(final String serviceIdentifier, final File persistencePath, final MapCache cacheToWrap) throws IOException {
         wali = new MinimalLockingWriteAheadLog<>(persistencePath.toPath(), 1, new Serde(), null);
         wrapped = cacheToWrap;
@@ -48,8 +48,8 @@ public class PersistentMapCache implements MapCache {
 
     synchronized void restore() throws IOException {
         final Collection<MapWaliRecord> recovered = wali.recoverRecords();
-        for ( final MapWaliRecord record : recovered ) {
-            if ( record.getUpdateType() == UpdateType.CREATE ) {
+        for (final MapWaliRecord record : recovered) {
+            if (record.getUpdateType() == UpdateType.CREATE) {
                 wrapped.putIfAbsent(record.getKey(), record.getValue());
             }
         }
@@ -58,24 +58,24 @@ public class PersistentMapCache implements MapCache {
     @Override
     public MapPutResult putIfAbsent(final ByteBuffer key, final ByteBuffer value) throws IOException {
         final MapPutResult putResult = wrapped.putIfAbsent(key, value);
-        if ( putResult.isSuccessful() ) {
+        if (putResult.isSuccessful()) {
             // The put was successful.
             final MapWaliRecord record = new MapWaliRecord(UpdateType.CREATE, key, value);
             final List<MapWaliRecord> records = new ArrayList<>();
             records.add(record);
 
-            if ( putResult.getEvictedKey() != null ) {
+            if (putResult.getEvictedKey() != null) {
                 records.add(new MapWaliRecord(UpdateType.DELETE, putResult.getEvictedKey(), putResult.getEvictedValue()));
             }
-            
+
             wali.update(Collections.singletonList(record), false);
-            
+
             final long modCount = modifications.getAndIncrement();
-            if ( modCount > 0 && modCount % 100000 == 0 ) {
+            if (modCount > 0 && modCount % 100000 == 0) {
                 wali.checkpoint();
             }
         }
-        
+
         return putResult;
     }
 
@@ -92,65 +92,64 @@ public class PersistentMapCache implements MapCache {
     @Override
     public ByteBuffer remove(ByteBuffer key) throws IOException {
         final ByteBuffer removeResult = wrapped.remove(key);
-        if ( removeResult != null ) {
+        if (removeResult != null) {
             final MapWaliRecord record = new MapWaliRecord(UpdateType.DELETE, key, removeResult);
             final List<MapWaliRecord> records = new ArrayList<>(1);
             records.add(record);
             wali.update(records, false);
-            
+
             final long modCount = modifications.getAndIncrement();
-            if ( modCount > 0 && modCount % 1000 == 0 ) {
+            if (modCount > 0 && modCount % 1000 == 0) {
                 wali.checkpoint();
             }
         }
         return removeResult;
     }
 
-
     @Override
     public void shutdown() throws IOException {
         wali.shutdown();
     }
 
-
     private static class MapWaliRecord {
+
         private final UpdateType updateType;
         private final ByteBuffer key;
         private final ByteBuffer value;
-        
+
         public MapWaliRecord(final UpdateType updateType, final ByteBuffer key, final ByteBuffer value) {
             this.updateType = updateType;
             this.key = key;
             this.value = value;
         }
-        
+
         public UpdateType getUpdateType() {
             return updateType;
         }
-        
+
         public ByteBuffer getKey() {
             return key;
         }
-        
+
         public ByteBuffer getValue() {
             return value;
         }
     }
-    
+
     private static class Serde implements SerDe<MapWaliRecord> {
 
         @Override
         public void serializeEdit(MapWaliRecord previousRecordState, MapWaliRecord newRecordState, java.io.DataOutputStream out) throws IOException {
             final UpdateType updateType = newRecordState.getUpdateType();
-            if ( updateType == UpdateType.DELETE ) {
+            if (updateType == UpdateType.DELETE) {
                 out.write(0);
             } else {
                 out.write(1);
             }
-            
+
             final byte[] key = newRecordState.getKey().array();
             final byte[] value = newRecordState.getValue().array();
-            
+
             out.writeInt(key.length);
             out.write(key);
             out.writeInt(value.length);
@@ -165,12 +164,12 @@ public class PersistentMapCache implements MapCache {
         @Override
         public MapWaliRecord deserializeEdit(final DataInputStream in, final Map<Object, MapWaliRecord> currentRecordStates, final int version) throws IOException {
             final int updateTypeValue = in.read();
-            if ( updateTypeValue < 0 ) {
+            if (updateTypeValue < 0) {
                 throw new EOFException();
             }
 
             final UpdateType updateType = (updateTypeValue == 0 ? UpdateType.DELETE : UpdateType.CREATE);
-            
+
             final int keySize = in.readInt();
             final byte[] key = new byte[keySize];
             in.readFully(key);
@@ -207,4 +206,4 @@ public class PersistentMapCache implements MapCache {
             return 1;
         }
     }
-}
\ No newline at end of file
+}
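The Serde above frames every logged mutation as one byte for the update type (0 for DELETE, 1 otherwise) followed by a length-prefixed key and a length-prefixed value; deserializeEdit reads the record back in the same order. A standalone sketch of that byte layout (the class name is made up):

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public class WaliEditLayoutSketch {
        public static void main(String[] args) throws IOException {
            // Re-creates the edit layout written by Serde.serializeEdit above:
            // update type byte, then length-prefixed key, then length-prefixed value.
            byte[] key = "k".getBytes(StandardCharsets.UTF_8);
            byte[] value = "v".getBytes(StandardCharsets.UTF_8);

            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(bytes)) {
                out.write(1);                 // 1 = CREATE, 0 = DELETE
                out.writeInt(key.length);
                out.write(key);
                out.writeInt(value.length);
                out.write(value);
            }
            System.out.println("edit record is " + bytes.size() + " bytes"); // 1 + 4 + 1 + 4 + 1 = 11
        }
    }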

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/SimpleMapCache.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/SimpleMapCache.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/SimpleMapCache.java
index 10139f1..9e8bbd1 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/SimpleMapCache.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/map/SimpleMapCache.java
@@ -33,46 +33,47 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class SimpleMapCache implements MapCache {
+
     private static final Logger logger = LoggerFactory.getLogger(SimpleMapCache.class);
 
     private final Map<ByteBuffer, MapCacheRecord> cache = new HashMap<>();
     private final SortedMap<MapCacheRecord, ByteBuffer> inverseCacheMap;
-    
+
     private final ReadWriteLock rwLock = new ReentrantReadWriteLock();
     private final Lock readLock = rwLock.readLock();
     private final Lock writeLock = rwLock.writeLock();
-    
+
     private final String serviceIdentifier;
-    
+
     private final int maxSize;
-    
+
     public SimpleMapCache(final String serviceIdentifier, final int maxSize, final EvictionPolicy evictionPolicy) {
         // need to change to ConcurrentMap as this is modified when only the readLock is held
         inverseCacheMap = new ConcurrentSkipListMap<>(evictionPolicy.getComparator());
         this.serviceIdentifier = serviceIdentifier;
         this.maxSize = maxSize;
     }
-    
+
     @Override
     public String toString() {
         return "SimpleSetCache[service id=" + serviceIdentifier + "]";
     }
 
-    // don't need synchronized because this method is only called when the writeLock is held, and all 
+    // don't need synchronized because this method is only called when the writeLock is held, and all
     // public methods obtain either the read or write lock
     private MapCacheRecord evict() {
-        if ( cache.size() < maxSize ) {
+        if (cache.size() < maxSize) {
             return null;
         }
-        
+
         final MapCacheRecord recordToEvict = inverseCacheMap.firstKey();
         final ByteBuffer valueToEvict = inverseCacheMap.remove(recordToEvict);
         cache.remove(valueToEvict);
-        
-        if ( logger.isDebugEnabled() ) {
+
+        if (logger.isDebugEnabled()) {
             logger.debug("Evicting value {} from cache", new String(valueToEvict.array(), StandardCharsets.UTF_8));
         }
-        
+
         return recordToEvict;
     }
 
@@ -81,44 +82,44 @@ public class SimpleMapCache implements MapCache {
         writeLock.lock();
         try {
             final MapCacheRecord record = cache.get(key);
-            if ( record == null ) {
+            if (record == null) {
                 // Record is null. We will add.
                 final MapCacheRecord evicted = evict();
                 final MapCacheRecord newRecord = new MapCacheRecord(key, value);
                 cache.put(key, newRecord);
                 inverseCacheMap.put(newRecord, key);
-                
-                if ( evicted == null ) {
+
+                if (evicted == null) {
                     return new MapPutResult(true, key, value, null, null, null);
                 } else {
                     return new MapPutResult(true, key, value, null, evicted.getKey(), evicted.getValue());
                 }
             }
-            
+
             // Record is not null. Increment hit count and return result indicating that record was not added.
             inverseCacheMap.remove(record);
             record.hit();
             inverseCacheMap.put(record, key);
-            
+
             return new MapPutResult(false, key, value, record.getValue(), null, null);
         } finally {
             writeLock.unlock();
         }
     }
-    
+
     @Override
     public boolean containsKey(final ByteBuffer key) {
         readLock.lock();
         try {
             final MapCacheRecord record = cache.get(key);
-            if ( record == null ) {
+            if (record == null) {
                 return false;
             }
-            
+
             inverseCacheMap.remove(record);
             record.hit();
             inverseCacheMap.put(record, key);
-            
+
             return true;
         } finally {
             readLock.unlock();
@@ -130,14 +131,14 @@ public class SimpleMapCache implements MapCache {
         readLock.lock();
         try {
             final MapCacheRecord record = cache.get(key);
-            if ( record == null ) {
+            if (record == null) {
                 return null;
             }
-            
+
             inverseCacheMap.remove(record);
             record.hit();
             inverseCacheMap.put(record, key);
-            
+
             return record.getValue();
         } finally {
             readLock.unlock();

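Taken together with the MapCache interface earlier in this patch, SimpleMapCache can be exercised directly without the socket server. A minimal usage sketch, assuming the classes from this bundle are on the classpath (the sketch class name is made up):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import org.apache.nifi.distributed.cache.server.EvictionPolicy;
    import org.apache.nifi.distributed.cache.server.map.MapCache;
    import org.apache.nifi.distributed.cache.server.map.MapPutResult;
    import org.apache.nifi.distributed.cache.server.map.SimpleMapCache;

    public class MapCacheSketch {
        public static void main(String[] args) throws Exception {
            MapCache cache = new SimpleMapCache("demo-service", 10000, EvictionPolicy.LFU);

            ByteBuffer key = ByteBuffer.wrap("k".getBytes(StandardCharsets.UTF_8));
            ByteBuffer value = ByteBuffer.wrap("v".getBytes(StandardCharsets.UTF_8));

            MapPutResult put = cache.putIfAbsent(key, value);
            System.out.println("stored: " + put.isSuccessful());      // true on first insert
            System.out.println("present: " + cache.containsKey(key)); // true afterwards
            cache.shutdown();
        }
    }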
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/PersistentSetCache.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/PersistentSetCache.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/PersistentSetCache.java
index 4d75fc0..c2c3a41 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/PersistentSetCache.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/PersistentSetCache.java
@@ -38,34 +38,34 @@ public class PersistentSetCache implements SetCache {
 
     private final SetCache wrapped;
     private final WriteAheadRepository<SetRecord> wali;
-    
+
     private final AtomicLong modifications = new AtomicLong(0L);
-    
+
     public PersistentSetCache(final String serviceIdentifier, final File persistencePath, final SetCache cacheToWrap) throws IOException {
         wali = new MinimalLockingWriteAheadLog<>(persistencePath.toPath(), 1, new Serde(), null);
         wrapped = cacheToWrap;
     }
-    
+
     public synchronized void restore() throws IOException {
         final Collection<SetRecord> recovered = wali.recoverRecords();
-        for ( final SetRecord record : recovered ) {
-            if ( record.getUpdateType() == UpdateType.CREATE ) {
+        for (final SetRecord record : recovered) {
+            if (record.getUpdateType() == UpdateType.CREATE) {
                 addIfAbsent(record.getBuffer());
             }
         }
     }
-    
+
     @Override
     public synchronized SetCacheResult remove(final ByteBuffer value) throws IOException {
         final SetCacheResult removeResult = wrapped.remove(value);
-        if ( removeResult.getResult() ) {
+        if (removeResult.getResult()) {
             final SetRecord record = new SetRecord(UpdateType.DELETE, value);
             final List<SetRecord> records = new ArrayList<>();
             records.add(record);
             wali.update(records, false);
-            
+
             final long modCount = modifications.getAndIncrement();
-            if ( modCount > 0 && modCount % 1000 == 0 ) {
+            if (modCount > 0 && modCount % 1000 == 0) {
                 wali.checkpoint();
             }
         }
@@ -76,24 +76,24 @@ public class PersistentSetCache implements SetCache {
     @Override
     public synchronized SetCacheResult addIfAbsent(final ByteBuffer value) throws IOException {
         final SetCacheResult addResult = wrapped.addIfAbsent(value);
-        if ( addResult.getResult() ) {
+        if (addResult.getResult()) {
             final SetRecord record = new SetRecord(UpdateType.CREATE, value);
             final List<SetRecord> records = new ArrayList<>();
             records.add(record);
-            
+
             final SetCacheRecord evictedRecord = addResult.getEvictedRecord();
-            if ( evictedRecord != null ) {
+            if (evictedRecord != null) {
                 records.add(new SetRecord(UpdateType.DELETE, evictedRecord.getValue()));
             }
-            
+
             wali.update(records, false);
-            
+
             final long modCount = modifications.getAndIncrement();
-            if ( modCount > 0 && modCount % 1000 == 0 ) {
+            if (modCount > 0 && modCount % 1000 == 0) {
                 wali.checkpoint();
             }
         }
-        
+
         return addResult;
     }
 
@@ -101,45 +101,46 @@ public class PersistentSetCache implements SetCache {
     public synchronized SetCacheResult contains(final ByteBuffer value) throws IOException {
         return wrapped.contains(value);
     }
-    
+
     @Override
     public void shutdown() throws IOException {
         wali.shutdown();
     }
-    
+
     private static class SetRecord {
+
         private final UpdateType updateType;
         private final ByteBuffer value;
-        
+
         public SetRecord(final UpdateType updateType, final ByteBuffer value) {
             this.updateType = updateType;
             this.value = value;
         }
-        
+
         public UpdateType getUpdateType() {
             return updateType;
         }
-        
+
         public ByteBuffer getBuffer() {
             return value;
         }
-        
+
         public byte[] getData() {
             return value.array();
         }
     }
-    
+
     private static class Serde implements SerDe<SetRecord> {
 
         @Override
         public void serializeEdit(final SetRecord previousRecordState, final SetRecord newRecordState, final DataOutputStream out) throws IOException {
             final UpdateType updateType = newRecordState.getUpdateType();
-            if ( updateType == UpdateType.DELETE ) {
+            if (updateType == UpdateType.DELETE) {
                 out.write(0);
             } else {
                 out.write(1);
             }
-            
+
             final byte[] data = newRecordState.getData();
             out.writeInt(data.length);
             out.write(newRecordState.getData());
@@ -153,16 +154,16 @@ public class PersistentSetCache implements SetCache {
         @Override
         public SetRecord deserializeEdit(final DataInputStream in, final Map<Object, SetRecord> currentRecordStates, final int version) throws IOException {
             final int value = in.read();
-            if ( value < 0 ) {
+            if (value < 0) {
                 throw new EOFException();
             }
 
             final UpdateType updateType = (value == 0 ? UpdateType.DELETE : UpdateType.CREATE);
-            
+
             final int size = in.readInt();
             final byte[] data = new byte[size];
             in.readFully(data);
-            
+
             return new SetRecord(updateType, ByteBuffer.wrap(data));
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCache.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCache.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCache.java
index bf6ae3e..dd37d0c 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCache.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCache.java
@@ -22,8 +22,11 @@ import java.nio.ByteBuffer;
 public interface SetCache {
 
     SetCacheResult remove(ByteBuffer value) throws IOException;
+
     SetCacheResult addIfAbsent(ByteBuffer value) throws IOException;
+
     SetCacheResult contains(ByteBuffer value) throws IOException;
+
     void shutdown() throws IOException;
-    
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheRecord.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheRecord.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheRecord.java
index 20b6fae..5a75775 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheRecord.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheRecord.java
@@ -21,33 +21,34 @@ import java.nio.ByteBuffer;
 import org.apache.nifi.distributed.cache.server.CacheRecord;
 
 public class SetCacheRecord extends CacheRecord {
+
     private final ByteBuffer value;
-    
+
     public SetCacheRecord(final ByteBuffer value) {
         this.value = value;
     }
-    
+
     public ByteBuffer getValue() {
         return value;
     }
-    
+
     @Override
     public int hashCode() {
         return value.hashCode();
     }
-    
+
     @Override
     public boolean equals(final Object obj) {
-        if ( this == obj ) {
+        if (this == obj) {
             return true;
         }
-        
+
         if (obj instanceof SetCacheRecord) {
             return value.equals(((SetCacheRecord) obj).value);
         }
         return false;
     }
-    
+
     @Override
     public String toString() {
         return "SetCacheRecord[value=" + new String(value.array()) + ", hitCount=" + getHitCount() + "]";

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheResult.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheResult.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheResult.java
index 732c4f0..7faceb6 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheResult.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SetCacheResult.java
@@ -16,27 +16,26 @@
  */
 package org.apache.nifi.distributed.cache.server.set;
 
-
-
 public class SetCacheResult {
+
     private final boolean result;
     private final SetCacheRecord stats;
     private final SetCacheRecord evictedRecord;
-    
+
     public SetCacheResult(final boolean result, final SetCacheRecord stats, final SetCacheRecord evictedRecord) {
         this.result = result;
         this.stats = stats;
         this.evictedRecord = evictedRecord;
     }
-    
+
     public boolean getResult() {
         return result;
     }
-    
+
     public SetCacheRecord getRecord() {
         return stats;
     }
-    
+
     public SetCacheRecord getEvictedRecord() {
         return evictedRecord;
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/9a3b6bed/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SimpleSetCache.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SimpleSetCache.java b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SimpleSetCache.java
index 77d6481..bf69ba7 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SimpleSetCache.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/main/java/org/apache/nifi/distributed/cache/server/set/SimpleSetCache.java
@@ -30,41 +30,42 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class SimpleSetCache implements SetCache {
+
     private static final Logger logger = LoggerFactory.getLogger(SimpleSetCache.class);
-    
+
     private final Map<ByteBuffer, SetCacheRecord> cache = new HashMap<>();
     private final SortedMap<SetCacheRecord, ByteBuffer> inverseCacheMap;
-    
+
     private final String serviceIdentifier;
-    
+
     private final int maxSize;
-    
+
     public SimpleSetCache(final String serviceIdentifier, final int maxSize, final EvictionPolicy evictionPolicy) {
         inverseCacheMap = new TreeMap<>(evictionPolicy.getComparator());
         this.serviceIdentifier = serviceIdentifier;
         this.maxSize = maxSize;
     }
-    
+
     private synchronized SetCacheRecord evict() {
-        if ( cache.size() < maxSize ) {
+        if (cache.size() < maxSize) {
             return null;
         }
-        
+
         final SetCacheRecord recordToEvict = inverseCacheMap.firstKey();
         final ByteBuffer valueToEvict = inverseCacheMap.remove(recordToEvict);
         cache.remove(valueToEvict);
-        
-        if ( logger.isDebugEnabled() ) {
+
+        if (logger.isDebugEnabled()) {
             logger.debug("Evicting value {} from cache", new String(valueToEvict.array(), StandardCharsets.UTF_8));
         }
-        
+
         return recordToEvict;
     }
-    
+
     @Override
     public synchronized SetCacheResult addIfAbsent(final ByteBuffer value) {
         final SetCacheRecord record = cache.get(value);
-        if ( record == null ) {
+        if (record == null) {
             final SetCacheRecord evicted = evict();
             final SetCacheRecord newRecord = new SetCacheRecord(value);
             cache.put(value, newRecord);
@@ -75,42 +76,42 @@ public class SimpleSetCache implements SetCache {
             inverseCacheMap.remove(record);
             record.hit();
             inverseCacheMap.put(record, value);
-            
+
             return new SetCacheResult(false, record, null);
         }
     }
-    
+
     @Override
     public synchronized SetCacheResult contains(final ByteBuffer value) {
         final SetCacheRecord record = cache.get(value);
-        if ( record == null ) {
+        if (record == null) {
             return new SetCacheResult(false, null, null);
         } else {
             // We have to remove the record and add it again in order to cause the Map to stay sorted
             inverseCacheMap.remove(record);
             record.hit();
             inverseCacheMap.put(record, value);
-            
+
             return new SetCacheResult(true, record, null);
         }
     }
-    
+
     @Override
     public synchronized SetCacheResult remove(final ByteBuffer value) {
         final SetCacheRecord record = cache.remove(value);
-        if ( record == null ) {
+        if (record == null) {
             return new SetCacheResult(false, null, null);
         } else {
             inverseCacheMap.remove(record);
             return new SetCacheResult(true, record, null);
         }
     }
-    
+
     @Override
     public String toString() {
         return "SimpleSetCache[service id=" + serviceIdentifier + "]";
     }
-    
+
     @Override
     public void shutdown() throws IOException {
     }

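SimpleSetCache mirrors the map variant: addIfAbsent, contains, and remove all re-insert the record into the inverse map after a hit so the eviction ordering stays current. A minimal usage sketch, again assuming the set-cache classes above are importable (the sketch class name is made up):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import org.apache.nifi.distributed.cache.server.EvictionPolicy;
    import org.apache.nifi.distributed.cache.server.set.SetCacheResult;
    import org.apache.nifi.distributed.cache.server.set.SimpleSetCache;

    public class SetCacheSketch {
        public static void main(String[] args) {
            SimpleSetCache cache = new SimpleSetCache("demo-service", 100, EvictionPolicy.LRU);
            ByteBuffer value = ByteBuffer.wrap("a".getBytes(StandardCharsets.UTF_8));

            SetCacheResult first = cache.addIfAbsent(value);   // value not present yet
            SetCacheResult second = cache.addIfAbsent(value);  // already present, hit count bumped
            System.out.println(first.getResult());   // true
            System.out.println(second.getResult());  // false
        }
    }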

[28/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileFlowFileRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileFlowFileRepository.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileFlowFileRepository.java
index 9e429d6..fe34fe0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileFlowFileRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileFlowFileRepository.java
@@ -26,9 +26,7 @@ import org.apache.nifi.controller.repository.claim.ContentClaimManager;
 
 /**
  * <p>
- * An in-memory implementation of the {@link FlowFileRepository}. Upon restart,
- * all FlowFiles will be discarded, including those that have been swapped out
- * by a {@link FlowFileSwapManager}.
+ * An in-memory implementation of the {@link FlowFileRepository}. Upon restart, all FlowFiles will be discarded, including those that have been swapped out by a {@link FlowFileSwapManager}.
  * </p>
  */
 public class VolatileFlowFileRepository implements FlowFileRepository {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/WriteAheadFlowFileRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/WriteAheadFlowFileRepository.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/WriteAheadFlowFileRepository.java
index 0779c4d..f2df821 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/WriteAheadFlowFileRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/WriteAheadFlowFileRepository.java
@@ -62,21 +62,12 @@ import org.wali.WriteAheadRepository;
  * </p>
  *
  * <p>
- * We expose a property named <code>nifi.flowfile.repository.always.sync</code>
- * that is a boolean value indicating whether or not to force WALI to sync with
- * disk on each update. By default, the value is <code>false</code>. This is
- * needed only in situations in which power loss is expected and not mitigated
- * by Uninterruptable Power Sources (UPS) or when running in an unstable Virtual
- * Machine for instance. Otherwise, we will flush the data that is written to
- * the Operating System and the Operating System will be responsible to flush
- * its buffers when appropriate. The Operating System can be configured to hold
- * only a certain buffer size or not to buffer at all, as well. When using a
- * UPS, this is generally not an issue, as the machine is typically notified
- * before dying, in which case the Operating System will flush the data to disk.
- * Additionally, most disks on enterprise servers also have battery backups that
- * can power the disks long enough to flush their buffers. For this reason, we
- * choose instead to not sync to disk for every write but instead sync only when
- * we checkpoint.
+ * We expose a property named <code>nifi.flowfile.repository.always.sync</code> that is a boolean value indicating whether or not to force WALI to sync with disk on each update. By default, the value
+ * is <code>false</code>. This is needed only in situations in which power loss is expected and not mitigated by Uninterruptable Power Sources (UPS) or when running in an unstable Virtual Machine for
+ * instance. Otherwise, we will flush the data that is written to the Operating System and the Operating System will be responsible to flush its buffers when appropriate. The Operating System can be
+ * configured to hold only a certain buffer size or not to buffer at all, as well. When using a UPS, this is generally not an issue, as the machine is typically notified before dying, in which case
+ * the Operating System will flush the data to disk. Additionally, most disks on enterprise servers also have battery backups that can power the disks long enough to flush their buffers. For this
+ * reason, we choose instead to not sync to disk for every write but instead sync only when we checkpoint.
  * </p>
  */
 public class WriteAheadFlowFileRepository implements FlowFileRepository, SyncListener {
@@ -263,9 +254,7 @@ public class WriteAheadFlowFileRepository implements FlowFileRepository, SyncLis
     }
 
     /**
-     * Swaps the FlowFiles that live on the given Connection out to disk, using
-     * the specified Swap File and returns the number of FlowFiles that were
-     * persisted.
+     * Swaps the FlowFiles that live on the given Connection out to disk, using the specified Swap File and returns the number of FlowFiles that were persisted.
      *
      * @param queue queue to swap out
      * @param swapLocation location to swap to

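The reflowed javadoc above is the authoritative description of the nifi.flowfile.repository.always.sync property: it controls whether WALI syncs to disk on every update and defaults to false. In nifi.properties it is a single boolean entry, e.g.:

    nifi.flowfile.repository.always.sync=false

Setting it to true forces a sync on each update, which the javadoc recommends only when power loss is a realistic risk not mitigated by a UPS.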
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/ContentDirection.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/ContentDirection.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/ContentDirection.java
index b5d70b2..83901ef 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/ContentDirection.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/ContentDirection.java
@@ -17,19 +17,16 @@
 package org.apache.nifi.controller.repository.claim;
 
 /**
- * Specifies one side of the Provenance Event for which the Content Claim is
- * being referenced
+ * Specifies one side of the Provenance Event for which the Content Claim is being referenced
  */
 public enum ContentDirection {
 
     /**
-     * Indicates the Content Claim that was the Input to the Process that
-     * generating a Provenance Event
+     * Indicates the Content Claim that was the Input to the Process that generated a Provenance Event
      */
     INPUT,
     /**
-     * Indicates the Content Claim that is the Output of the process that
-     * generated the Provenance Event.
+     * Indicates the Content Claim that is the Output of the process that generated the Provenance Event.
      */
     OUTPUT;
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/StandardContentClaim.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/StandardContentClaim.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/StandardContentClaim.java
index 54a1b2c..a8a6963 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/StandardContentClaim.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/claim/StandardContentClaim.java
@@ -20,9 +20,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  * <p>
- * A ContentClaim is a reference to a given flow file's content. Multiple flow
- * files may reference the same content by both having the same content
- * claim.</p>
+ * A ContentClaim is a reference to a given flow file's content. Multiple flow files may reference the same content by having the same content claim.</p>
  *
  * <p>
  * Must be thread safe</p>
@@ -88,8 +86,7 @@ public final class StandardContentClaim implements ContentClaim, Comparable<Cont
     }
 
     /**
-     * Provides the natural ordering for ContentClaim objects. By default they
-     * are sorted by their id, then container, then section
+     * Provides the natural ordering for ContentClaim objects. By default they are sorted by their id, then container, then section
      *
      * @param other other claim
      * @return x such that x <=1 if this is less than other;

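The ordering described above (by id, then container, then section) can be expressed as a simple chained comparison; the class and field names below are stand-ins for illustration, not the actual ContentClaim API.

    import java.util.Comparator;

    public class ClaimOrderingSketch {

        // Stand-in for a ContentClaim; field names are illustrative only.
        static final class Claim {
            final long id;
            final String container;
            final String section;

            Claim(final long id, final String container, final String section) {
                this.id = id;
                this.container = container;
                this.section = section;
            }
        }

        // Orders by id, then container, then section, as the Javadoc above describes.
        static final Comparator<Claim> NATURAL_ORDER = new Comparator<Claim>() {
            @Override
            public int compare(final Claim a, final Claim b) {
                int result = Long.compare(a.id, b.id);
                if (result == 0) {
                    result = a.container.compareTo(b.container);
                }
                if (result == 0) {
                    result = a.section.compareTo(b.section);
                }
                return result;
            }
        };

        public static void main(final String[] args) {
            System.out.println(NATURAL_ORDER.compare(
                    new Claim(1, "default", "1"), new Claim(2, "default", "1")) < 0); // true: lower id sorts first
        }
    }
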
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseInputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseInputStream.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseInputStream.java
index ddcf6c9..1ab85ff 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseInputStream.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseInputStream.java
@@ -20,9 +20,7 @@ import java.io.IOException;
 import java.io.InputStream;
 
 /**
- * Wraps an existing InputStream, so that when {@link InputStream#close()} is
- * called, the underlying InputStream is NOT closed but this InputStream can no
- * longer be written to
+ * Wraps an existing InputStream, so that when {@link InputStream#close()} is called, the underlying InputStream is NOT closed but this InputStream can no longer be read from
  */
 public class DisableOnCloseInputStream extends InputStream {
 

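The close-suppression idea above can be sketched in a few lines. This is not NiFi's DisableOnCloseInputStream, only an illustrative wrapper showing the pattern: close() disables the wrapper but deliberately leaves the underlying stream open.

    import java.io.FilterInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    // Illustrative wrapper only, not NiFi's DisableOnCloseInputStream.
    public class CloseSuppressingInputStream extends FilterInputStream {

        private volatile boolean closed = false;

        public CloseSuppressingInputStream(final InputStream delegate) {
            super(delegate);
        }

        @Override
        public int read() throws IOException {
            if (closed) {
                throw new IOException("This stream has already been closed");
            }
            return super.read();
        }

        @Override
        public void close() {
            closed = true; // deliberately do NOT close the underlying stream
        }
    }
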
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseOutputStream.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseOutputStream.java
index 720e7f5..4845d60 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseOutputStream.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/DisableOnCloseOutputStream.java
@@ -20,9 +20,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 
 /**
- * Wraps an existing OutputStream, so that when {@link OutputStream#close()} is
- * called, the underlying OutputStream is NOT closed but this OutputStream can
- * no longer be written to
+ * Wraps an existing OutputStream, so that when {@link OutputStream#close()} is called, the underlying OutputStream is NOT closed but this OutputStream can no longer be written to
  */
 public class DisableOnCloseOutputStream extends OutputStream {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessInputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessInputStream.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessInputStream.java
index a710070..946f042 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessInputStream.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessInputStream.java
@@ -27,12 +27,8 @@ import org.apache.nifi.processor.exception.FlowFileAccessException;
 
 /**
  * <p>
- * Wraps an InputStream so that if any IOException is thrown, it will be wrapped
- * in a FlowFileAccessException. We do this to isolate IOExceptions thrown by
- * the framework from those thrown by user code. If thrown by the framework, it
- * generally indicates a problem communicating with the Content Repository and
- * session rollback is often appropriate so that the FlowFile can be processed
- * again.
+ * Wraps an InputStream so that if any IOException is thrown, it will be wrapped in a FlowFileAccessException. We do this to isolate IOExceptions thrown by the framework from those thrown by user
+ * code. If thrown by the framework, it generally indicates a problem communicating with the Content Repository and session rollback is often appropriate so that the FlowFile can be processed again.
  * </p>
  */
 public class FlowFileAccessInputStream extends FilterInputStream {
@@ -56,8 +52,7 @@ public class FlowFileAccessInputStream extends FilterInputStream {
     }
 
     /**
-     * @return the ContentNotFoundException that was thrown by this stream, or
-     * <code>null</code> if no such Exception was thrown
+     * @return the ContentNotFoundException that was thrown by this stream, or <code>null</code> if no such Exception was thrown
      */
     public ContentNotFoundException getContentNotFoundException() {
         return thrown;

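A hedged sketch of the exception-translation idea above: any IOException raised by the wrapped stream is rethrown as an unchecked exception, so framework I/O failures stand apart from IOExceptions thrown by user code. This is illustrative only and uses a plain RuntimeException rather than the framework's FlowFileAccessException.

    import java.io.FilterInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    // Illustrative only; the framework wraps into FlowFileAccessException,
    // here a plain RuntimeException stands in for it.
    public class ExceptionTranslatingInputStream extends FilterInputStream {

        public ExceptionTranslatingInputStream(final InputStream delegate) {
            super(delegate);
        }

        @Override
        public int read() throws IOException {
            try {
                return super.read();
            } catch (final IOException ioe) {
                // An IOException here came from the wrapped (framework-side) stream,
                // so surface it as an unchecked exception rather than letting it mix
                // with IOExceptions thrown by user code.
                throw new RuntimeException("Failed to read content", ioe);
            }
        }
    }
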
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessOutputStream.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessOutputStream.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessOutputStream.java
index 744e3a6..f4edcfe 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessOutputStream.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/FlowFileAccessOutputStream.java
@@ -25,12 +25,9 @@ import org.apache.nifi.processor.exception.FlowFileAccessException;
 
 /**
  * <p>
- * Wraps an OutputStream so that if any IOException is thrown, it will be
- * wrapped in a FlowFileAccessException. We do this to isolate IOExceptions
- * thrown by the framework from those thrown by user code. If thrown by the
- * framework, it generally indicates a problem communicating with the Content
- * Repository (such as out of disk space) and session rollback is often
- * appropriate so that the FlowFile can be processed again.
+ * Wraps an OutputStream so that if any IOException is thrown, it will be wrapped in a FlowFileAccessException. We do this to isolate IOExceptions thrown by the framework from those thrown by user
+ * code. If thrown by the framework, it generally indicates a problem communicating with the Content Repository (such as out of disk space) and session rollback is often appropriate so that the
+ * FlowFile can be processed again.
  * </p>
  */
 public class FlowFileAccessOutputStream extends FilterOutputStream {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/LongHolder.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/LongHolder.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/LongHolder.java
index 932cf9c..bd5fec1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/LongHolder.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/io/LongHolder.java
@@ -17,10 +17,8 @@
 package org.apache.nifi.controller.repository.io;
 
 /**
- * Class to hold a long value that can be incremented and decremented. This
- * allows the abstraction of passing a long value by reference, rather than by
- * value, without the overhead of synchronization required by the use of an
- * AtomicLong.
+ * Class to hold a long value that can be incremented and decremented. This allows the abstraction of passing a long value by reference, rather than by value, without the overhead of synchronization
+ * required by the use of an AtomicLong.
  */
 public class LongHolder {
 

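A minimal sketch of the holder idea above, assuming nothing beyond plain Java: a mutable long that can be handed to a callee and mutated in place, deliberately without the synchronization cost of an AtomicLong (and therefore not thread-safe).

    // Plain mutable holder; intentionally NOT thread-safe, unlike AtomicLong.
    public class LongHolderSketch {

        private long value;

        public LongHolderSketch(final long initialValue) {
            this.value = initialValue;
        }

        public long get() {
            return value;
        }

        public void increment(final long delta) {
            value += delta; // callee mutations are visible to any caller holding the same object
        }

        public static void main(final String[] args) {
            final LongHolderSketch bytesWritten = new LongHolderSketch(0L);
            bytesWritten.increment(1024L);
            System.out.println(bytesWritten.get()); // prints 1024
        }
    }
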
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ConnectableProcessContext.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ConnectableProcessContext.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ConnectableProcessContext.java
index 01285b0..7617e7c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ConnectableProcessContext.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ConnectableProcessContext.java
@@ -39,8 +39,7 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 
 /**
- * This class is essentially an empty shell for {@link Connectable}s that are
- * not Processors
+ * This class is essentially an empty shell for {@link Connectable}s that are not Processors
  */
 public class ConnectableProcessContext implements ProcessContext {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ScheduleState.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ScheduleState.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ScheduleState.java
index cb7f55f..ea0b456 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ScheduleState.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/ScheduleState.java
@@ -62,12 +62,11 @@ public class ScheduleState {
     }
 
     /**
-     * Maintains an AtomicBoolean so that the first thread to call this method
-     * after a Processor is no longer scheduled to run will receive a
-     * <code>true</code> and MUST call the methods annotated with @OnStopped
+     * Maintains an AtomicBoolean so that the first thread to call this method after a Processor is no longer scheduled to run will receive <code>true</code> and MUST call the methods annotated with
      *
-     * @return <code>true</code> if the caller is required to call Processor
-     * methods annotated with
+     * <code>@OnStopped</code>.
+     *
+     * @return <code>true</code> if the caller is required to call Processor methods annotated with
      * @OnStopped, <code>false</code> otherwise
      */
     public boolean mustCallOnStoppedMethods() {
@@ -75,8 +74,7 @@ public class ScheduleState {
     }
 
     /**
-     * Establishes the list of relevant futures for this processor. Replaces any
-     * previously held futures.
+     * Establishes the list of relevant futures for this processor. Replaces any previously held futures.
      *
      * @param newFutures futures
      */

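The first-caller-wins behavior described above can be sketched with a single AtomicBoolean; the class and method names here are illustrative, not ScheduleState's actual fields.

    import java.util.concurrent.atomic.AtomicBoolean;

    // Illustrative gate: exactly one caller observes true after markStopped().
    public class StopCallbackGate {

        private final AtomicBoolean mustCallOnStopped = new AtomicBoolean(false);

        // Invoked when the component is no longer scheduled to run.
        public void markStopped() {
            mustCallOnStopped.set(true);
        }

        // Only the first thread to call this after markStopped() receives true
        // and is therefore responsible for invoking the @OnStopped methods.
        public boolean mustCallOnStoppedMethods() {
            return mustCallOnStopped.getAndSet(false);
        }
    }
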
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/StandardProcessScheduler.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/StandardProcessScheduler.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/StandardProcessScheduler.java
index bb565cb..ffa669d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/StandardProcessScheduler.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/scheduling/StandardProcessScheduler.java
@@ -69,8 +69,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Responsible for scheduling Processors, Ports, and Funnels to run at regular
- * intervals
+ * Responsible for scheduling Processors, Ports, and Funnels to run at regular intervals
  */
 public final class StandardProcessScheduler implements ProcessScheduler {
 
@@ -257,10 +256,8 @@ public final class StandardProcessScheduler implements ProcessScheduler {
     }
 
     /**
-     * Starts scheduling the given processor to run after invoking all methods
-     * on the underlying {@link nifi.processor.Processor
-     * FlowFileProcessor} that are annotated with the {@link OnScheduled}
-     * annotation.
+     * Starts scheduling the given processor to run after invoking all methods on the underlying {@link nifi.processor.Processor
+     * FlowFileProcessor} that are annotated with the {@link OnScheduled} annotation.
      */
     @Override
     public synchronized void startProcessor(final ProcessorNode procNode) {
@@ -379,9 +376,8 @@ public final class StandardProcessScheduler implements ProcessScheduler {
     }
 
     /**
-     * Stops scheduling the given processor to run and invokes all methods on
-     * the underlying {@link nifi.processor.Processor FlowFileProcessor} that
-     * are annotated with the {@link OnUnscheduled} annotation.
+     * Stops scheduling the given processor to run and invokes all methods on the underlying {@link nifi.processor.Processor FlowFileProcessor} that are annotated with the {@link OnUnscheduled}
+     * annotation.
      */
     @Override
     public synchronized void stopProcessor(final ProcessorNode procNode) {
@@ -577,9 +573,7 @@ public final class StandardProcessScheduler implements ProcessScheduler {
     }
 
     /**
-     * Returns the ScheduleState that is registered for the given component; if
-     * no ScheduleState current is registered, one is created and registered
-     * atomically, and then that value is returned.
+     * Returns the ScheduleState that is registered for the given component; if no ScheduleState is currently registered, one is created and registered atomically, and then that value is returned.
      *
      * @param schedulable schedulable
      * @return scheduled state

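A sketch of the atomic get-or-create behavior described above, assuming a ConcurrentMap keyed by component; putIfAbsent provides the register-once guarantee even when multiple threads race to register the same component. Names and the factory interface are illustrative only.

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    // Illustrative registry: at most one state object is ever registered per component.
    public class ScheduleStateRegistry<K, S> {

        public interface StateFactory<C, T> {
            T newState(C component);
        }

        private final ConcurrentMap<K, S> states = new ConcurrentHashMap<>();
        private final StateFactory<K, S> factory;

        public ScheduleStateRegistry(final StateFactory<K, S> factory) {
            this.factory = factory;
        }

        public S getScheduleState(final K component) {
            S state = states.get(component);
            if (state == null) {
                final S created = factory.newState(component);
                final S existing = states.putIfAbsent(component, created); // atomic create-and-register
                state = (existing == null) ? created : existing;
            }
            return state;
        }
    }
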
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java
index a45bf76..d8506c1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/service/StandardControllerServiceProvider.java
@@ -518,10 +518,8 @@ public class StandardControllerServiceProvider implements ControllerServiceProvi
     }
 
     /**
-     * Returns a List of all components that reference the given referencedNode
-     * (either directly or indirectly through another service) that are also of
-     * the given componentType. The list that is returned is in the order in
-     * which they will need to be 'activated' (enabled/started).
+     * Returns a List of all components that reference the given referencedNode (either directly or indirectly through another service) that are also of the given componentType. The list that is
+     * returned is in the order in which they will need to be 'activated' (enabled/started).
      *
      * @param referencedNode node
      * @param componentType type

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/status/history/StandardStatusSnapshot.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/status/history/StandardStatusSnapshot.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/status/history/StandardStatusSnapshot.java
index e1fdca8..abaf899 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/status/history/StandardStatusSnapshot.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/status/history/StandardStatusSnapshot.java
@@ -53,7 +53,7 @@ public class StandardStatusSnapshot implements StatusSnapshot {
             public StatusSnapshot reduce(final List<StatusSnapshot> values) {
                 Date reducedTimestamp = null;
                 final Set<MetricDescriptor<?>> allDescriptors = new LinkedHashSet<>(metricValues.keySet());
-                
+
                 for (final StatusSnapshot statusSnapshot : values) {
                     if (reducedTimestamp == null) {
                         reducedTimestamp = statusSnapshot.getTimestamp();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunConnectableTask.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunConnectableTask.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunConnectableTask.java
index f3cbb90..a824ad0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunConnectableTask.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunConnectableTask.java
@@ -35,9 +35,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Continually runs a Connectable as long as the processor has work to do.
- * {@link #call()} will return <code>true</code> if the Connectable should be
- * yielded, <code>false</code> otherwise.
+ * Continually runs a Connectable as long as the processor has work to do. {@link #call()} will return <code>true</code> if the Connectable should be yielded, <code>false</code> otherwise.
  */
 public class ContinuallyRunConnectableTask implements Callable<Boolean> {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunProcessorTask.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunProcessorTask.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunProcessorTask.java
index baed6ae..efa5814 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunProcessorTask.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ContinuallyRunProcessorTask.java
@@ -44,9 +44,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Continually runs a processor as long as the processor has work to do.
- * {@link #call()} will return <code>true</code> if the processor should be
- * yielded, <code>false</code> otherwise.
+ * Continually runs a processor as long as the processor has work to do. {@link #call()} will return <code>true</code> if the processor should be yielded, <code>false</code> otherwise.
  */
 public class ContinuallyRunProcessorTask implements Callable<Boolean> {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ExpireFlowFiles.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ExpireFlowFiles.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ExpireFlowFiles.java
index a351a68..7d8bcec 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ExpireFlowFiles.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ExpireFlowFiles.java
@@ -34,9 +34,7 @@ import org.apache.nifi.groups.RemoteProcessGroup;
 import org.apache.nifi.util.FormatUtils;
 
 /**
- * This task runs through all Connectable Components and goes through its
- * incoming queues, polling for FlowFiles and accepting none. This causes the
- * desired side effect of expiring old FlowFiles.
+ * This task runs through all Connectable Components and goes through their incoming queues, polling for FlowFiles and accepting none. This causes the desired side effect of expiring old FlowFiles.
  */
 public class ExpireFlowFiles implements Runnable {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/encrypt/StringEncryptor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/encrypt/StringEncryptor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/encrypt/StringEncryptor.java
index fccd10e..5de1beb 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/encrypt/StringEncryptor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/encrypt/StringEncryptor.java
@@ -26,16 +26,14 @@ import org.jasypt.exceptions.EncryptionOperationNotPossibleException;
 
 /**
  * <p>
- * An application specific string encryptor that collects configuration from the
- * application properties, system properties, and/or system environment.
+ * An application specific string encryptor that collects configuration from the application properties, system properties, and/or system environment.
  * </p>
  *
  * <p>
  * Instance of this class are thread-safe</p>
  *
  * <p>
- * The encryption provider and algorithm is configured using the application
- * properties:
+ * The encryption provider and algorithm is configured using the application properties:
  * <ul>
  * <li>nifi.sensitive.props.provider</li>
  * <li>nifi.sensitive.props.algorithm</li>
@@ -73,12 +71,10 @@ public final class StringEncryptor {
     }
 
     /**
-     * Creates an instance of the nifi sensitive property encryptor. Validates
-     * that the encryptor is actually working.
+     * Creates an instance of the nifi sensitive property encryptor. Validates that the encryptor is actually working.
      *
      * @return encryptor
-     * @throws EncryptionException if any issues arise initializing or
-     * validating the encryptor
+     * @throws EncryptionException if any issues arise initializing or validating the encryptor
      */
     public static StringEncryptor createEncryptor() throws EncryptionException {
 

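For illustration, a hedged sketch of building a Jasypt-based string encryptor from properties named like the ones listed above; the fallback algorithm, property handling, and key handling here are assumptions for the example, not NiFi's actual StringEncryptor logic.

    import java.util.Properties;
    import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;

    // Hedged sketch only; property name mirrors the Javadoc above, but defaults are assumptions.
    public class PropertyDrivenEncryptorSketch {

        public static StandardPBEStringEncryptor fromProperties(final Properties props, final String key) {
            final StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
            encryptor.setAlgorithm(props.getProperty("nifi.sensitive.props.algorithm", "PBEWithMD5AndDES"));
            encryptor.setPassword(key);
            return encryptor;
        }

        public static void main(final String[] args) {
            final StandardPBEStringEncryptor encryptor = fromProperties(new Properties(), "not-a-real-key");
            final String cipherText = encryptor.encrypt("some sensitive value");
            System.out.println(encryptor.decrypt(cipherText)); // prints the original value
        }
    }
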
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/engine/FlowEngine.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/engine/FlowEngine.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/engine/FlowEngine.java
index 3be178f..d407e2f 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/engine/FlowEngine.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/engine/FlowEngine.java
@@ -34,8 +34,7 @@ public final class FlowEngine extends ScheduledThreadPoolExecutor {
     /**
      * Creates a new instance of FlowEngine
      *
-     * @param corePoolSize the maximum number of threads available to tasks
-     * running in the engine.
+     * @param corePoolSize the maximum number of threads available to tasks running in the engine.
      * @param threadNamePrefix for naming the thread
      */
     public FlowEngine(int corePoolSize, final String threadNamePrefix) {
@@ -45,11 +44,9 @@ public final class FlowEngine extends ScheduledThreadPoolExecutor {
     /**
      * Creates a new instance of FlowEngine
      *
-     * @param corePoolSize the maximum number of threads available to tasks
-     * running in the engine.
+     * @param corePoolSize the maximum number of threads available to tasks running in the engine.
      * @param threadNamePrefix for thread naming
-     * @param daemon if true, the thread pool will be populated with daemon
-     * threads, otherwise the threads will not be marked as daemon.
+     * @param daemon if true, the thread pool will be populated with daemon threads, otherwise the threads will not be marked as daemon.
      */
     public FlowEngine(int corePoolSize, final String threadNamePrefix, final boolean daemon) {
         super(corePoolSize);
@@ -70,8 +67,7 @@ public final class FlowEngine extends ScheduledThreadPoolExecutor {
     }
 
     /**
-     * Hook method called by the running thread whenever a runnable task is
-     * given to the thread to run.
+     * Hook method called by the running thread whenever a runnable task is given to the thread to run.
      *
      * @param thread thread
      * @param runnable runnable
@@ -84,9 +80,7 @@ public final class FlowEngine extends ScheduledThreadPoolExecutor {
     }
 
     /**
-     * Hook method called by the thread that executed the given runnable after
-     * execution of the runnable completed. Logs the fact of completion and any
-     * errors that might have occured.
+     * Hook method called by the thread that executed the given runnable after execution of the runnable completed. Logs the fact of completion and any errors that might have occurred.
      *
      * @param runnable runnable
      * @param throwable throwable

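The hook methods described above come from ScheduledThreadPoolExecutor itself; a small sketch of overriding them outside NiFi looks like the following. Note that for tasks submitted via schedule(), an exception thrown by the task is typically captured in the returned Future, so the Throwable passed to afterExecute may be null.

    import java.util.concurrent.ScheduledThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    // Sketch of the executor hooks described above, outside of NiFi's FlowEngine.
    public class LoggingScheduledExecutor extends ScheduledThreadPoolExecutor {

        public LoggingScheduledExecutor(final int corePoolSize) {
            super(corePoolSize);
        }

        @Override
        protected void beforeExecute(final Thread thread, final Runnable runnable) {
            super.beforeExecute(thread, runnable);
            System.out.println(thread.getName() + " is about to run a task");
        }

        @Override
        protected void afterExecute(final Runnable runnable, final Throwable throwable) {
            super.afterExecute(runnable, throwable);
            if (throwable != null) {
                System.out.println("Task failed: " + throwable); // log any error handed to the hook
            }
        }

        public static void main(final String[] args) throws InterruptedException {
            final LoggingScheduledExecutor engine = new LoggingScheduledExecutor(1);
            engine.schedule(new Runnable() {
                @Override
                public void run() {
                    System.out.println("tick");
                }
            }, 10, TimeUnit.MILLISECONDS);
            engine.shutdown();
            engine.awaitTermination(1, TimeUnit.SECONDS);
        }
    }
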
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/events/VolatileBulletinRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/events/VolatileBulletinRepository.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/events/VolatileBulletinRepository.java
index e8708bd..a20e974 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/events/VolatileBulletinRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/events/VolatileBulletinRepository.java
@@ -176,9 +176,8 @@ public class VolatileBulletinRepository implements BulletinRepository {
     }
 
     /**
-     * Overrides the default bulletin processing strategy. When a custom
-     * bulletin strategy is employed, bulletins will not be persisted in this
-     * repository and will sent to the specified strategy instead.
+     * Overrides the default bulletin processing strategy. When a custom bulletin strategy is employed, bulletins will not be persisted in this repository and will be sent to the specified strategy
+     * instead.
      *
      * @param strategy bulletin strategy
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java
index 27eca37..f8ca2f0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/fingerprint/FingerprintFactory.java
@@ -72,15 +72,10 @@ import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;
 
 /**
- * Creates a fingerprint of a flow.xml. The order of elements or attributes in
- * the flow.xml does not influence the fingerprint generation.
+ * Creates a fingerprint of a flow.xml. The order of elements or attributes in the flow.xml does not influence the fingerprint generation.
  *
- * Only items in the flow.xml that influence the processing of data are
- * incorporated into the fingerprint. Examples of items involved in the
- * fingerprint are: processor IDs, processor relationships, and processor
- * properties. Examples of items not involved in the fingerprint are: items in
- * the processor "settings" or "comments" tabs, position information, flow
- * controller settings, and counters.
+ * Only items in the flow.xml that influence the processing of data are incorporated into the fingerprint. Examples of items involved in the fingerprint are: processor IDs, processor relationships,
+ * and processor properties. Examples of items not involved in the fingerprint are: items in the processor "settings" or "comments" tabs, position information, flow controller settings, and counters.
  *
  */
 public final class FingerprintFactory {
@@ -125,8 +120,7 @@ public final class FingerprintFactory {
     }
 
     /**
-     * Creates a fingerprint of a flow. The order of elements or attributes in
-     * the flow does not influence the fingerprint generation.
+     * Creates a fingerprint of a flow. The order of elements or attributes in the flow does not influence the fingerprint generation.
      *
      * @param flowBytes the flow represented as bytes
      * @param controller the controller
@@ -183,8 +177,7 @@ public final class FingerprintFactory {
     }
 
     /**
-     * Creates a fingerprint of a Collection of Templates The order of the
-     * templates does not influence the fingerprint generation.
+     * Creates a fingerprint of a Collection of Templates. The order of the templates does not influence the fingerprint generation.
      *
      *
      * @param templates collection of templates

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java
index e0181ea..c7baef4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java
@@ -1805,14 +1805,11 @@ public final class StandardProcessGroup implements ProcessGroup {
     }
 
     /**
-     * Verifies that all ID's defined within the given snippet reference
-     * components within this ProcessGroup. If this is not the case, throws
-     * {@link IllegalStateException}.
+     * Verifies that all ID's defined within the given snippet reference components within this ProcessGroup. If this is not the case, throws {@link IllegalStateException}.
      *
      * @param snippet the snippet
      * @throws NullPointerException if the argument is null
-     * @throws IllegalStateException if the snippet contains an ID that
-     * references a component that is not part of this ProcessGroup
+     * @throws IllegalStateException if the snippet contains an ID that references a component that is not part of this ProcessGroup
      */
     private void verifyContents(final Snippet snippet) throws NullPointerException, IllegalStateException {
         requireNonNull(snippet);
@@ -1829,10 +1826,8 @@ public final class StandardProcessGroup implements ProcessGroup {
 
     /**
      * <p>
-     * Verifies that all ID's specified by the given set exist as keys in the
-     * given Map. If any of the ID's does not exist as a key in the map, will
-     * throw {@link IllegalStateException} indicating the ID that is invalid and
-     * specifying the Component Type.
+     * Verifies that all ID's specified by the given set exist as keys in the given Map. If any of the ID's does not exist as a key in the map, this method will throw {@link IllegalStateException} indicating the
+     * ID that is invalid and specifying the Component Type.
      * </p>
      *
      * <p>

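A short sketch of the key-existence check described above, assuming a plain Map keyed by component ID; the helper class and method names are illustrative, not the signatures of NiFi's private verification methods.

    import java.util.Map;
    import java.util.Set;

    // Illustrative helper only.
    public final class SnippetChecks {

        private SnippetChecks() {
        }

        public static <T> void verifyAllKeysExist(final Set<String> ids, final Map<String, T> components, final String componentType) {
            if (ids == null) {
                return;
            }
            for (final String id : ids) {
                if (!components.containsKey(id)) {
                    throw new IllegalStateException("ID " + id + " does not refer to a " + componentType + " within this ProcessGroup");
                }
            }
        }
    }
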
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycle.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycle.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycle.java
index 84f0dbc..72b129c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycle.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycle.java
@@ -17,37 +17,30 @@
 package org.apache.nifi.lifecycle;
 
 /**
- * Represents a start/stop lifecyle for a component.  <code>start</code> should
- * only be called once per lifecyle unless otherwise documented by implementing
- * classes.
+ * Represents a start/stop lifecycle for a component. <code>start</code> should only be called once per lifecycle unless otherwise documented by implementing classes.
  *
  * @author unattributed
  */
 public interface LifeCycle {
 
     /**
-     * Initiates the start state of the lifecyle. Should not throw an exception
-     * if the component is already running.
+     * Initiates the start state of the lifecycle. Should not throw an exception if the component is already running.
      *
      * @throws LifeCycleStartException if startup or initialization failed
      */
     void start() throws LifeCycleStartException;
 
     /**
-     * Initiates the stop state of the lifecycle. Should not throw an exception
-     * if the component is already stopped.
+     * Initiates the stop state of the lifecycle. Should not throw an exception if the component is already stopped.
      *
-     * @param force true if all efforts such as thread interruption should be
-     * attempted to stop the component; false if a graceful stopping should be
-     * employed
+     * @param force true if all efforts such as thread interruption should be attempted to stop the component; false if a graceful stopping should be employed
      *
      * @throws LifeCycleStopException if the shutdown failed
      */
     void stop(boolean force) throws LifeCycleStopException;
 
     /**
-     * @return true if the component is started, but not yet stopped; false
-     * otherwise
+     * @return true if the component is started, but not yet stopped; false otherwise
      */
     boolean isRunning();
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleException.java
index 297a998..d3bf2bf 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.lifecycle;
 
 /**
- * The base exception for issues encountered during the lifecycle of a class
- * implementing the <code>LifeCycle</code> interface.
+ * The base exception for issues encountered during the lifecycle of a class implementing the <code>LifeCycle</code> interface.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStartException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStartException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStartException.java
index 8d2f726..725d840 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStartException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStartException.java
@@ -17,9 +17,7 @@
 package org.apache.nifi.lifecycle;
 
 /**
- * Represents the exceptional case when a problem is encountered during the
- * startup or initialization of a class implementing the <code>LifeCycle</code>
- * interface.
+ * Represents the exceptional case when a problem is encountered during the startup or initialization of a class implementing the <code>LifeCycle</code> interface.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStopException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStopException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStopException.java
index be3779b..910e0a8 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStopException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/lifecycle/LifeCycleStopException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.lifecycle;
 
 /**
- * Represents the exceptional case when a problem is encountered during the
- * shutdown of a class implementing the <code>LifeCycle</code> interface.
+ * Represents the exceptional case when a problem is encountered during the shutdown of a class implementing the <code>LifeCycle</code> interface.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/persistence/FlowConfigurationDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/persistence/FlowConfigurationDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/persistence/FlowConfigurationDAO.java
index cc3dbea..92a0d84 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/persistence/FlowConfigurationDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/persistence/FlowConfigurationDAO.java
@@ -32,24 +32,18 @@ import org.apache.nifi.controller.UninheritableFlowException;
 public interface FlowConfigurationDAO {
 
     /**
-     * Loads the given controller with the values from the given proposed flow.
-     * If loading the proposed flow configuration would cause the controller to
-     * orphan flow files, then an UninheritableFlowException is thrown.
+     * Loads the given controller with the values from the given proposed flow. If loading the proposed flow configuration would cause the controller to orphan flow files, then an
+     * UninheritableFlowException is thrown.
      *
-     * If the FlowSynchronizationException is thrown, then the controller may
-     * have changed some of its state and should no longer be used.
+     * If the FlowSynchronizationException is thrown, then the controller may have changed some of its state and should no longer be used.
      *
      * @param controller a controller
      * @param dataFlow the flow to load
      * @throws java.io.IOException
      *
-     * @throws FlowSerializationException if proposed flow is not a valid flow
-     * configuration file
-     * @throws UninheritableFlowException if the proposed flow cannot be loaded
-     * by the controller because in doing so would risk orphaning flow files
-     * @throws FlowSynchronizationException if updates to the controller failed.
-     * If this exception is thrown, then the controller should be considered
-     * unsafe to be used
+     * @throws FlowSerializationException if proposed flow is not a valid flow configuration file
+     * @throws UninheritableFlowException if the proposed flow cannot be loaded by the controller because doing so would risk orphaning flow files
+     * @throws FlowSynchronizationException if updates to the controller failed. If this exception is thrown, then the controller should be considered unsafe to be used
      */
     void load(FlowController controller, DataFlow dataFlow)
             throws IOException, FlowSerializationException, FlowSynchronizationException, UninheritableFlowException;
@@ -76,8 +70,7 @@ public interface FlowConfigurationDAO {
      * @param flow to save
      * @throws NullPointerException if the given flow is null
      * @throws IOException If unable to persist state of given flow
-     * @throws IllegalStateException if FileFlowDAO not in proper state for
-     * saving
+     * @throws IllegalStateException if FileFlowDAO not in proper state for saving
      */
     void save(FlowController flow) throws IOException;
 
@@ -85,12 +78,10 @@ public interface FlowConfigurationDAO {
      * Saves all changes made to the given flow to the given File.
      *
      * @param flow to save
-     * @param outStream the OutputStream to which the FlowController will be
-     * written
+     * @param outStream the OutputStream to which the FlowController will be written
      * @throws NullPointerException if the given flow is null
      * @throws IOException If unable to persist state of given flow
-     * @throws IllegalStateException if FileFlowDAO not in proper state for
-     * saving
+     * @throws IllegalStateException if FileFlowDAO not in proper state for saving
      */
     void save(FlowController flow, OutputStream outStream) throws IOException;
 
@@ -98,12 +89,10 @@ public interface FlowConfigurationDAO {
      * Saves all changes made to the given flow to the given File.
      *
      * @param flow to save
-     * @param archive if true will also attempt to archive the flow
-     * configuration
+     * @param archive if true will also attempt to archive the flow configuration
      * @throws NullPointerException if the given flow is null
      * @throws IOException If unable to persist state of given flow
-     * @throws IllegalStateException if FileFlowDAO not in proper state for
-     * saving
+     * @throws IllegalStateException if FileFlowDAO not in proper state for saving
      */
     void save(FlowController flow, boolean archive) throws IOException;
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardProcessContext.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardProcessContext.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardProcessContext.java
index 3e1d1e6..d1bfacf 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardProcessContext.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardProcessContext.java
@@ -65,8 +65,7 @@ public class StandardProcessContext implements ProcessContext, ControllerService
 
     /**
      * <p>
-     * Returns the currently configured value for the property with the given
-     * name.
+     * Returns the currently configured value for the property with the given name.
      * </p>
      */
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardPropertyValue.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardPropertyValue.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardPropertyValue.java
index b320a61..acb86aa 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardPropertyValue.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/processor/StandardPropertyValue.java
@@ -39,15 +39,9 @@ public final class StandardPropertyValue implements PropertyValue {
     }
 
     /**
-     * Constructs a new StandardPropertyValue with the given value & service
-     * lookup and indicates whether or not the rawValue contains any NiFi
-     * Expressions. If it is unknown whether or not the value contains any NiFi
-     * Expressions, the
-     * {@link #StandardPropertyValue(String, ControllerServiceLookup)}
-     * constructor should be used or <code>true</code> should be passed.
-     * However, if it is known that the value contains no NiFi Expression, that
-     * information should be provided so that calls to
-     * {@link #evaluateAttributeExpressions()} are much more efficient
+     * Constructs a new StandardPropertyValue with the given value & service lookup and indicates whether or not the rawValue contains any NiFi Expressions. If it is unknown whether or not the value
+     * contains any NiFi Expressions, the {@link #StandardPropertyValue(String, ControllerServiceLookup)} constructor should be used or <code>true</code> should be passed. However, if it is known that
+     * the value contains no NiFi Expression, that information should be provided so that calls to {@link #evaluateAttributeExpressions()} are much more efficient
      *
      * @param rawValue value
      * @param serviceLookup lookup

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/RemoteNiFiUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/RemoteNiFiUtils.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/RemoteNiFiUtils.java
index bed5279..e80d383 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/RemoteNiFiUtils.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/RemoteNiFiUtils.java
@@ -120,12 +120,9 @@ public class RemoteNiFiUtils {
     }
 
     /**
-     * Returns the port on which the remote instance is listening for Flow File
-     * transfers, or <code>null</code> if the remote instance is not configured
-     * to use Site-to-Site transfers.
+     * Returns the port on which the remote instance is listening for Flow File transfers, or <code>null</code> if the remote instance is not configured to use Site-to-Site transfers.
      *
-     * @param uri the base URI of the remote instance. This should include the
-     * path only to the nifi-api level, as well as the protocol, host, and port.
+     * @param uri the base URI of the remote instance. This should include the path only to the nifi-api level, as well as the protocol, host, and port.
      * @param timeoutMillis wait time in millis
      * @return port number
      * @throws IOException ex
@@ -158,9 +155,7 @@ public class RemoteNiFiUtils {
     }
 
     /**
-     * Returns the port on which the remote instance is listening for Flow File
-     * transfers, or <code>null</code> if the remote instance is not configured
-     * to use Site-to-Site transfers.
+     * Returns the port on which the remote instance is listening for Flow File transfers, or <code>null</code> if the remote instance is not configured to use Site-to-Site transfers.
      *
      * @param uri the full URI to fetch, including the path.
      * @return port

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/StandardRemoteProcessGroup.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/StandardRemoteProcessGroup.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/StandardRemoteProcessGroup.java
index f4b5975..d19b5c1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/StandardRemoteProcessGroup.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/remote/StandardRemoteProcessGroup.java
@@ -72,9 +72,8 @@ import com.sun.jersey.api.client.ClientResponse.Status;
 import com.sun.jersey.api.client.UniformInterfaceException;
 
 /**
- * Represents the Root Process Group of a remote NiFi Instance. Holds
- * information about that remote instance, as well as {@link IncomingPort}s and
- * {@link OutgoingPort}s for communicating with the remote instance.
+ * Represents the Root Process Group of a remote NiFi Instance. Holds information about that remote instance, as well as {@link IncomingPort}s and {@link OutgoingPort}s for communicating with the
+ * remote instance.
  */
 public class StandardRemoteProcessGroup implements RemoteProcessGroup {
 
@@ -325,11 +324,8 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
     }
 
     /**
-     * Changes the currently configured input ports to the ports described in
-     * the given set. If any port is currently configured that is not in the set
-     * given, that port will be shutdown and removed. If any port is currently
-     * not configured and is in the set given, that port will be instantiated
-     * and started.
+     * Changes the currently configured input ports to the ports described in the given set. If any port is currently configured that is not in the set given, that port will be shutdown and removed.
+     * If any port is currently not configured and is in the set given, that port will be instantiated and started.
      *
      * @param ports the new ports
      *
@@ -378,12 +374,10 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
     }
 
     /**
-     * Returns a boolean indicating whether or not an Output Port exists with
-     * the given ID
+     * Returns a boolean indicating whether or not an Output Port exists with the given ID
      *
      * @param id identifier of port
-     * @return <code>true</code> if an Output Port exists with the given ID,
-     * <code>false</code> otherwise.
+     * @return <code>true</code> if an Output Port exists with the given ID, <code>false</code> otherwise.
      */
     public boolean containsOutputPort(final String id) {
         readLock.lock();
@@ -395,11 +389,8 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
     }
 
     /**
-     * Changes the currently configured output ports to the ports described in
-     * the given set. If any port is currently configured that is not in the set
-     * given, that port will be shutdown and removed. If any port is currently
-     * not configured and is in the set given, that port will be instantiated
-     * and started.
+     * Changes the currently configured output ports to the ports described in the given set. If any port is currently configured that is not in the set given, that port will be shutdown and removed.
+     * If any port is currently not configured and is in the set given, that port will be instantiated and started.
      *
      * @param ports the new ports
      *
@@ -452,8 +443,7 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
      *
      *
      * @throws NullPointerException if the given output Port is null
-     * @throws IllegalStateException if the port does not belong to this remote
-     * process group
+     * @throws IllegalStateException if the port does not belong to this remote process group
      */
     @Override
     public void removeNonExistentPort(final RemoteGroupPort port) {
@@ -531,13 +521,11 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
     }
 
     /**
-     * Adds an Output Port to this Remote Process Group that is described by
-     * this DTO.
+     * Adds an Output Port to this Remote Process Group that is described by this DTO.
      *
      * @param descriptor
      *
-     * @throws IllegalStateException if an Output Port already exists with the
-     * ID given by dto.getId()
+     * @throws IllegalStateException if an Output Port already exists with the ID given by dto.getId()
      */
     private void addOutputPort(final RemoteProcessGroupPortDescriptor descriptor) {
         writeLock.lock();
@@ -563,8 +551,7 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
 
     /**
      * @param portIdentifier the ID of the Port to send FlowFiles to
-     * @return {@link RemoteGroupPort} that can be used to send FlowFiles to the
-     * port whose ID is given on the remote instance
+     * @return {@link RemoteGroupPort} that can be used to send FlowFiles to the port whose ID is given on the remote instance
      */
     @Override
     public RemoteGroupPort getInputPort(final String portIdentifier) {
@@ -581,8 +568,7 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
     }
 
     /**
-     * @return a set of {@link OutgoingPort}s used for transmitting FlowFiles to
-     * the remote instance
+     * @return a set of {@link OutgoingPort}s used for transmitting FlowFiles to the remote instance
      */
     @Override
     public Set<RemoteGroupPort> getInputPorts() {
@@ -597,13 +583,11 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
     }
 
     /**
-     * Adds an InputPort to this ProcessGroup that is described by the given
-     * DTO.
+     * Adds an InputPort to this ProcessGroup that is described by the given DTO.
      *
      * @param descriptor port descriptor
      *
-     * @throws IllegalStateException if an Input Port already exists with the ID
-     * given by the ID of the DTO.
+     * @throws IllegalStateException if an Input Port already exists with the ID given by the ID of the DTO.
      */
     private void addInputPort(final RemoteProcessGroupPortDescriptor descriptor) {
         writeLock.lock();
@@ -643,8 +627,7 @@ public class StandardRemoteProcessGroup implements RemoteProcessGroup {
     }
 
     /**
-     * @return a set of {@link RemoteGroupPort}s used for receiving FlowFiles
-     * from the remote instance
+     * @return a set of {@link RemoteGroupPort}s used for receiving FlowFiles from the remote instance
      */
     @Override
     public Set<RemoteGroupPort> getOutputPorts() {

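The setInputPorts/setOutputPorts contract described in the Javadoc above amounts to reconciling the configured ports against a proposed set: drop and shut down anything missing from the set, create and start anything new. A rough sketch of that reconcile-by-id idea follows; the PortDescriptor type and the helper methods are placeholders, not the real NiFi API.

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;
    import java.util.Set;

    class PortReconcileSketch {

        interface PortDescriptor {
            String getId();
        }

        static void reconcile(final Map<String, PortDescriptor> configured, final Set<PortDescriptor> proposed) {
            final Map<String, PortDescriptor> proposedById = new HashMap<>();
            for (final PortDescriptor descriptor : proposed) {
                proposedById.put(descriptor.getId(), descriptor);
            }

            // Any configured port not present in the proposed set is shut down and removed.
            final Iterator<Map.Entry<String, PortDescriptor>> itr = configured.entrySet().iterator();
            while (itr.hasNext()) {
                final Map.Entry<String, PortDescriptor> entry = itr.next();
                if (!proposedById.containsKey(entry.getKey())) {
                    shutdownAndRemove(entry.getValue());
                    itr.remove();
                }
            }

            // Any proposed port not yet configured is instantiated and started.
            for (final PortDescriptor descriptor : proposed) {
                if (!configured.containsKey(descriptor.getId())) {
                    configured.put(descriptor.getId(), descriptor);
                    instantiateAndStart(descriptor);
                }
            }
        }

        private static void shutdownAndRemove(final PortDescriptor port) {
            // placeholder for stopping the port and tearing it down
        }

        private static void instantiateAndStart(final PortDescriptor port) {
            // placeholder for creating and starting the port
        }
    }
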
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/services/FlowService.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/services/FlowService.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/services/FlowService.java
index 71cf969..e59a8d4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/services/FlowService.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/services/FlowService.java
@@ -29,16 +29,14 @@ import org.apache.nifi.controller.UninheritableFlowException;
 import org.apache.nifi.lifecycle.LifeCycle;
 
 /**
- * Defines the API level services available for carrying out file-based dataflow
- * operations.
+ * Defines the API level services available for carrying out file-based dataflow operations.
  *
  * @author unattributed
  */
 public interface FlowService extends LifeCycle {
 
     /**
-     * Immediately persists the state of the flow controller to the flow.xml
-     * file in a blocking call.
+     * Immediately persists the state of the flow controller to the flow.xml file in a blocking call.
      *
      * @throws NullPointerException if the given flow is null.
      * @throws IOException if any problem occurs creating/modifying file
@@ -46,19 +44,16 @@ public interface FlowService extends LifeCycle {
     void saveFlowChanges() throws IOException;
 
     /**
-     * Immediately persists the state of the flow controller to the given output
-     * stream in a blocking call.
+     * Immediately persists the state of the flow controller to the given output stream in a blocking call.
      *
-     * @param outStream the stream to which the FlowController is to be
-     * persisted
+     * @param outStream the stream to which the FlowController is to be persisted
      * @throws NullPointerException if the given flow is null.
      * @throws IOException if any problem occurs creating/modifying file
      */
     void saveFlowChanges(OutputStream outStream) throws IOException;
 
     /**
-     * Saves the given stream to the flow.xml file on disk. This method does not
-     * change the state of the flow controller.
+     * Saves the given stream to the flow.xml file on disk. This method does not change the state of the flow controller.
      *
      * @param is an input stream
      * @throws IOException if unable to save the flow
@@ -66,10 +61,8 @@ public interface FlowService extends LifeCycle {
     void overwriteFlow(InputStream is) throws IOException;
 
     /**
-     * Asynchronously saves the flow controller. The flow controller will be
-     * copied and immediately returned. If another call to save is made within
-     * that time the latest called state of the flow controller will be used. In
-     * database terms this technique is referred to as 'write-delay'.
+     * Asynchronously saves the flow controller. The flow controller will be copied and immediately returned. If another call to save is made within that time the latest called state of the flow
+     * controller will be used. In database terms this technique is referred to as 'write-delay'.
      *
      * @param delayUnit unit of delay
      * @param delay period of delay
@@ -77,47 +70,35 @@ public interface FlowService extends LifeCycle {
     void saveFlowChanges(TimeUnit delayUnit, long delay);
 
     /**
-     * Asynchronously saves the flow controller. The flow controller will be
-     * copied and immediately returned. If another call to save is made within
-     * that time the latest called state of the flow controller will be used. In
-     * database terms this technique is referred to as 'write-delay'.
+     * Asynchronously saves the flow controller. The flow controller will be copied and immediately returned. If another call to save is made within that time the latest called state of the flow
+     * controller will be used. In database terms this technique is referred to as 'write-delay'.
      *
      * @param delayUnit unit of delay
      * @param delay period of delay
-     * @param archive if true means the user wants the flow configuration to be
-     * archived as well
+     * @param archive if true means the user wants the flow configuration to be archived as well
      */
     void saveFlowChanges(TimeUnit delayUnit, long delay, boolean archive);
 
     /**
      * Stops the flow and underlying repository as determined by user
      *
-     * @param force if true the controller is not allowed to gracefully shut
-     * down.
+     * @param force if true the controller is not allowed to gracefully shut down.
      */
     @Override
     void stop(boolean force);
 
     /**
-     * Loads the flow controller with the given flow. Passing null means that
-     * the local flow on disk will used as the proposed flow. If loading the
-     * proposed flow configuration would cause the controller to orphan flow
-     * files, then an UninheritableFlowException is thrown.
+     * Loads the flow controller with the given flow. Passing null means that the local flow on disk will be used as the proposed flow. If loading the proposed flow configuration would cause the
+     * controller to orphan flow files, then an UninheritableFlowException is thrown.
      *
-     * If the FlowSynchronizationException is thrown, then the controller may
-     * have changed some of its state and should no longer be used.
+     * If the FlowSynchronizationException is thrown, then the controller may have changed some of its state and should no longer be used.
      *
      * @param proposedFlow the flow to load
      *
-     * @throws IOException if flow configuration could not be retrieved from
-     * disk
-     * @throws FlowSerializationException if proposed flow is not a valid flow
-     * configuration file
-     * @throws UninheritableFlowException if the proposed flow cannot be loaded
-     * by the controller because in doing so would risk orphaning flow files
-     * @throws FlowSynchronizationException if updates to the controller failed.
-     * If this exception is thrown, then the controller should be considered
-     * unsafe to be used
+     * @throws IOException if flow configuration could not be retrieved from disk
+     * @throws FlowSerializationException if proposed flow is not a valid flow configuration file
+     * @throws UninheritableFlowException if the proposed flow cannot be loaded by the controller because doing so would risk orphaning flow files
+     * @throws FlowSynchronizationException if updates to the controller failed. If this exception is thrown, then the controller should be considered unsafe to be used
      */
     void load(DataFlow proposedFlow)
             throws IOException, FlowSerializationException, FlowSynchronizationException, UninheritableFlowException;

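The 'write-delay' behaviour described for saveFlowChanges(TimeUnit, long) can be pictured as coalescing save requests inside a delay window: only the first request schedules a write, and that write captures whatever the latest state is when it fires. A self-contained sketch of that idea (not the NiFi implementation) is shown below.

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicBoolean;

    class WriteDelaySketch {
        private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        private final AtomicBoolean savePending = new AtomicBoolean(false);

        void saveFlowChanges(final TimeUnit delayUnit, final long delay) {
            // Only the first request in the window schedules a write; later
            // requests within the window simply piggy-back on it.
            if (savePending.compareAndSet(false, true)) {
                scheduler.schedule(new Runnable() {
                    @Override
                    public void run() {
                        savePending.set(false);
                        persistLatestFlowState(); // placeholder for the real write
                    }
                }, delay, delayUnit);
            }
        }

        private void persistLatestFlowState() {
            // Would serialize the current flow to flow.xml here.
        }
    }
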
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/FlowControllerFactoryBean.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/FlowControllerFactoryBean.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/FlowControllerFactoryBean.java
index 665738b..6949cf9 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/FlowControllerFactoryBean.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/FlowControllerFactoryBean.java
@@ -29,9 +29,7 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
 
 /**
- * Factory bean for creating a singleton FlowController instance. If the
- * application is configured to act as the cluster manager, then null is always
- * returned as the created instance.
+ * Factory bean for creating a singleton FlowController instance. If the application is configured to act as the cluster manager, then null is always returned as the created instance.
  */
 @SuppressWarnings("rawtypes")
 public class FlowControllerFactoryBean implements FactoryBean, ApplicationContextAware {

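As a rough illustration of the factory-bean behaviour described above, the sketch below returns null when the node is configured as the cluster manager and otherwise lazily creates a single instance. The field and helper methods are placeholders, not the actual NiFi implementation.

    import org.springframework.beans.factory.FactoryBean;

    class ClusterAwareFactoryBeanSketch implements FactoryBean<Object> {
        private volatile Object flowController;

        @Override
        public Object getObject() throws Exception {
            if (isConfiguredAsClusterManager()) {
                return null; // the cluster manager does not host a FlowController
            }
            if (flowController == null) {
                flowController = createFlowController(); // placeholder
            }
            return flowController;
        }

        @Override
        public Class<?> getObjectType() {
            return Object.class;
        }

        @Override
        public boolean isSingleton() {
            return true;
        }

        private boolean isConfiguredAsClusterManager() { return false; } // placeholder
        private Object createFlowController() { return new Object(); }   // placeholder
    }
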
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/StandardFlowServiceFactoryBean.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/StandardFlowServiceFactoryBean.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/StandardFlowServiceFactoryBean.java
index cfd855f..2aed5c2 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/StandardFlowServiceFactoryBean.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/spring/StandardFlowServiceFactoryBean.java
@@ -28,9 +28,7 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
 
 /**
- * Factory bean for creating a singleton FlowController instance. If the
- * application is configured to act as the cluster manager, then null is always
- * returned as the created instance.
+ * Factory bean for creating a singleton FlowController instance. If the application is configured to act as the cluster manager, then null is always returned as the created instance.
  */
 @SuppressWarnings("rawtypes")
 public class StandardFlowServiceFactoryBean implements FactoryBean, ApplicationContextAware {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ComponentStatusReport.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ComponentStatusReport.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ComponentStatusReport.java
index 1296004..ca31467 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ComponentStatusReport.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/util/ComponentStatusReport.java
@@ -28,9 +28,7 @@ import org.apache.nifi.controller.status.ProcessorStatus;
 import org.apache.nifi.controller.status.RemoteProcessGroupStatus;
 
 /**
- * ComponentStatusReport is a util class that can be used to "flatten" a
- * ProcessGroupStatus into a collection of Map's so that retrieval of a Status
- * for a particular component is very efficient
+ * ComponentStatusReport is a util class that can be used to "flatten" a ProcessGroupStatus into a collection of Maps so that retrieval of a Status for a particular component is very efficient.
  */
 public class ComponentStatusReport {
 
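The "flatten for fast lookup" idea behind ComponentStatusReport can be sketched as a single walk of the ProcessGroupStatus tree that indexes processor statuses by component id; the accessor names below are assumed from the public status API, and the real class also indexes other component types.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.nifi.controller.status.ProcessGroupStatus;
    import org.apache.nifi.controller.status.ProcessorStatus;

    class StatusFlattenSketch {
        static Map<String, ProcessorStatus> flattenProcessors(final ProcessGroupStatus root) {
            final Map<String, ProcessorStatus> byId = new HashMap<>();
            collect(root, byId);
            return byId;
        }

        private static void collect(final ProcessGroupStatus group, final Map<String, ProcessorStatus> byId) {
            for (final ProcessorStatus status : group.getProcessorStatus()) {
                byId.put(status.getId(), status);
            }
            for (final ProcessGroupStatus child : group.getProcessGroupStatus()) {
                collect(child, byId); // recurse into nested groups
            }
        }
    }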


[29/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/e811929f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/e811929f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/e811929f

Branch: refs/heads/NIFI-292
Commit: e811929f5913586b38f5f95d321623da2f07b64a
Parents: 43b2f04
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 14:56:45 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 14:56:45 2015 -0400

----------------------------------------------------------------------
 .../apache/nifi/cluster/HeartbeatPayload.java   |   3 +-
 .../org/apache/nifi/connectable/LocalPort.java  |   5 +-
 .../nifi/connectable/StandardConnection.java    |  10 +-
 .../nifi/controller/FileSystemSwapManager.java  |   6 +-
 .../apache/nifi/controller/FlowController.java  | 214 ++++++-------------
 .../controller/FlowSerializationException.java  |   3 +-
 .../apache/nifi/controller/FlowSerializer.java  |   3 +-
 .../FlowSynchronizationException.java           |   3 +-
 .../nifi/controller/FlowSynchronizer.java       |  25 +--
 .../nifi/controller/FlowUnmarshaller.java       |   4 +-
 .../controller/StandardFlowSynchronizer.java    |  12 +-
 .../nifi/controller/StandardProcessorNode.java  |  64 ++----
 .../apache/nifi/controller/TemplateManager.java |  19 +-
 .../controller/UninheritableFlowException.java  |   3 +-
 .../repository/FileSystemRepository.java        |   4 +-
 .../controller/repository/ProcessContext.java   |  18 +-
 .../repository/ProvenanceEventEnricher.java     |   3 +-
 .../repository/RepositoryPurgeException.java    |   3 +-
 .../repository/StandardFlowFileRecord.java      |   7 +-
 .../repository/StandardProcessSession.java      |  34 +--
 .../repository/StandardProvenanceReporter.java  |   6 +-
 .../repository/StandardRepositoryRecord.java    |   3 +-
 .../StandardRepositoryStatusReport.java         |  12 +-
 .../repository/VolatileContentRepository.java   |  30 +--
 .../repository/VolatileFlowFileRepository.java  |   4 +-
 .../WriteAheadFlowFileRepository.java           |  25 +--
 .../repository/claim/ContentDirection.java      |   9 +-
 .../repository/claim/StandardContentClaim.java  |   7 +-
 .../io/DisableOnCloseInputStream.java           |   4 +-
 .../io/DisableOnCloseOutputStream.java          |   4 +-
 .../io/FlowFileAccessInputStream.java           |  11 +-
 .../io/FlowFileAccessOutputStream.java          |   9 +-
 .../controller/repository/io/LongHolder.java    |   6 +-
 .../scheduling/ConnectableProcessContext.java   |   3 +-
 .../controller/scheduling/ScheduleState.java    |  12 +-
 .../scheduling/StandardProcessScheduler.java    |  18 +-
 .../StandardControllerServiceProvider.java      |   6 +-
 .../status/history/StandardStatusSnapshot.java  |   2 +-
 .../tasks/ContinuallyRunConnectableTask.java    |   4 +-
 .../tasks/ContinuallyRunProcessorTask.java      |   4 +-
 .../nifi/controller/tasks/ExpireFlowFiles.java  |   4 +-
 .../apache/nifi/encrypt/StringEncryptor.java    |  12 +-
 .../java/org/apache/nifi/engine/FlowEngine.java |  16 +-
 .../nifi/events/VolatileBulletinRepository.java |   5 +-
 .../nifi/fingerprint/FingerprintFactory.java    |  17 +-
 .../nifi/groups/StandardProcessGroup.java       |  13 +-
 .../org/apache/nifi/lifecycle/LifeCycle.java    |  17 +-
 .../nifi/lifecycle/LifeCycleException.java      |   3 +-
 .../nifi/lifecycle/LifeCycleStartException.java |   4 +-
 .../nifi/lifecycle/LifeCycleStopException.java  |   3 +-
 .../nifi/persistence/FlowConfigurationDAO.java  |  33 +--
 .../nifi/processor/StandardProcessContext.java  |   3 +-
 .../nifi/processor/StandardPropertyValue.java   |  12 +-
 .../org/apache/nifi/remote/RemoteNiFiUtils.java |  11 +-
 .../nifi/remote/StandardRemoteProcessGroup.java |  49 ++---
 .../org/apache/nifi/services/FlowService.java   |  55 ++---
 .../nifi/spring/FlowControllerFactoryBean.java  |   4 +-
 .../spring/StandardFlowServiceFactoryBean.java  |   4 +-
 .../apache/nifi/util/ComponentStatusReport.java |   4 +-
 .../org/apache/nifi/util/ReflectionUtils.java   |  71 ++----
 .../java/org/apache/nifi/util/SnippetUtils.java |   6 +-
 61 files changed, 305 insertions(+), 663 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/cluster/HeartbeatPayload.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/cluster/HeartbeatPayload.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/cluster/HeartbeatPayload.java
index 093b238..668c5e0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/cluster/HeartbeatPayload.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/cluster/HeartbeatPayload.java
@@ -34,8 +34,7 @@ import org.apache.nifi.diagnostics.SystemDiagnostics;
 import org.apache.nifi.jaxb.CounterAdapter;
 
 /**
- * The payload of the heartbeat. The payload contains status to inform the
- * cluster manager the current workload of this node.
+ * The payload of the heartbeat. The payload contains status to inform the cluster manager the current workload of this node.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/LocalPort.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/LocalPort.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/LocalPort.java
index f0739c2..ceb85f2 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/LocalPort.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/LocalPort.java
@@ -37,9 +37,8 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.scheduling.SchedulingStrategy;
 
 /**
- * Provides a mechanism by which <code>FlowFile</code>s can be transferred into
- * and out of a <code>ProcessGroup</code> to and/or from another
- * <code>ProcessGroup</code> within the same instance of NiFi.
+ * Provides a mechanism by which <code>FlowFile</code>s can be transferred into and out of a <code>ProcessGroup</code> to and/or from another <code>ProcessGroup</code> within the same instance of
+ * NiFi.
  */
 public class LocalPort extends AbstractPort {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/StandardConnection.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/StandardConnection.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/StandardConnection.java
index b2feab5..86c9320 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/StandardConnection.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/connectable/StandardConnection.java
@@ -39,9 +39,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 
 /**
- * Models a connection between connectable components. A connection may contain
- * one or more relationships that map the source component to the destination
- * component.
+ * Models a connection between connectable components. A connection may contain one or more relationships that map the source component to the destination component.
  */
 public final class StandardConnection implements Connection {
 
@@ -231,10 +229,8 @@ public final class StandardConnection implements Connection {
     }
 
     /**
-     * Gives this Connection ownership of the given FlowFile and allows the
-     * Connection to hold on to the FlowFile but NOT provide the FlowFile to
-     * consumers. This allows us to ensure that the Connection is not deleted
-     * during the middle of a Session commit.
+     * Gives this Connection ownership of the given FlowFile and allows the Connection to hold on to the FlowFile but NOT provide the FlowFile to consumers. This allows us to ensure that the
+     * Connection is not deleted during the middle of a Session commit.
      *
      * @param flowFile to add
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FileSystemSwapManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FileSystemSwapManager.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FileSystemSwapManager.java
index 3041ada..604dba9 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FileSystemSwapManager.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FileSystemSwapManager.java
@@ -74,8 +74,7 @@ import org.slf4j.LoggerFactory;
 
 /**
  * <p>
- * An implementation of the {@link FlowFileSwapManager} that swaps FlowFiles
- * to/from local disk
+ * An implementation of the {@link FlowFileSwapManager} that swaps FlowFiles to/from local disk
  * </p>
  */
 public class FileSystemSwapManager implements FlowFileSwapManager {
@@ -562,8 +561,7 @@ public class FileSystemSwapManager implements FlowFileSwapManager {
     }
 
     /**
-     * Recovers FlowFiles from all Swap Files, returning the largest FlowFile ID
-     * that was recovered.
+     * Recovers FlowFiles from all Swap Files, returning the largest FlowFile ID that was recovered.
      *
      * @param queueProvider provider
      * @return the largest FlowFile ID that was recovered

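The recovery contract described above (read every swap file back and return the largest recovered FlowFile id, so that id generation can safely resume after it) can be outlined as follows; the helper that reads a swap file is a placeholder, not the real deserialization logic.

    import java.io.File;
    import java.util.List;

    class SwapRecoverySketch {
        static long recoverLargestFlowFileId(final List<File> swapFiles) {
            long maxId = -1L;
            for (final File swapFile : swapFiles) {
                for (final long flowFileId : readFlowFileIds(swapFile)) {
                    maxId = Math.max(maxId, flowFileId);
                }
            }
            return maxId;
        }

        private static long[] readFlowFileIds(final File swapFile) {
            // Placeholder: would deserialize the swap file contents here.
            return new long[0];
        }
    }
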
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowController.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowController.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowController.java
index 0d7699a..e241112 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowController.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowController.java
@@ -582,8 +582,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
 
     /**
      * <p>
-     * Causes any processors that were added to the flow with a 'delayStart'
-     * flag of true to now start
+     * Causes any processors that were added to the flow with a 'delayStart' flag of true to now start
      * </p>
      *
      * @param startDelayedComponents true if start
@@ -697,17 +696,14 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
      * Creates a connection between two Connectable objects.
      *
      * @param id required ID of the connection
-     * @param name the name of the connection, or <code>null</code> to leave the
-     * connection unnamed
+     * @param name the name of the connection, or <code>null</code> to leave the connection unnamed
      * @param source required source
      * @param destination required destination
      * @param relationshipNames required collection of relationship names
      * @return
      *
-     * @throws NullPointerException if the ID, source, destination, or set of
-     * relationships is null.
-     * @throws IllegalArgumentException if <code>relationships</code> is an
-     * empty collection
+     * @throws NullPointerException if the ID, source, destination, or set of relationships is null.
+     * @throws IllegalArgumentException if <code>relationships</code> is an empty collection
      */
     public Connection createConnection(final String id, final String name, final Connectable source, final Connectable destination, final Collection<String> relationshipNames) {
         final StandardConnection.Builder builder = new StandardConnection.Builder(processScheduler);
@@ -749,8 +745,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
      * @param name port name
      * @return new port
      * @throws NullPointerException if the ID or name is not unique
-     * @throws IllegalStateException if an Input Port already exists with the
-     * same name or id.
+     * @throws IllegalStateException if an Input Port already exists with the same name or id.
      */
     public Port createLocalInputPort(String id, String name) {
         id = requireNonNull(id).intern();
@@ -766,8 +761,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
      * @param name port name
      * @return new port
      * @throws NullPointerException if the ID or name is not unique
-     * @throws IllegalStateException if an Input Port already exists with the
-     * same name or id.
+     * @throws IllegalStateException if an Input Port already exists with the same name or id.
      */
     public Port createLocalOutputPort(String id, String name) {
         id = requireNonNull(id).intern();
@@ -789,16 +783,14 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
 
     /**
      * <p>
-     * Creates a new ProcessorNode with the given type and identifier and
-     * initializes it invoking the methods annotated with {@link OnAdded}.
+     * Creates a new ProcessorNode with the given type and identifier and initializes it invoking the methods annotated with {@link OnAdded}.
      * </p>
      *
      * @param type processor type
      * @param id processor id
      * @return new processor
      * @throws NullPointerException if either arg is null
-     * @throws ProcessorInstantiationException if the processor cannot be
-     * instantiated for any reason
+     * @throws ProcessorInstantiationException if the processor cannot be instantiated for any reason
      */
     public ProcessorNode createProcessor(final String type, String id) throws ProcessorInstantiationException {
         return createProcessor(type, id, true);
@@ -806,19 +798,15 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
 
     /**
      * <p>
-     * Creates a new ProcessorNode with the given type and identifier and
-     * optionally initializes it.
+     * Creates a new ProcessorNode with the given type and identifier and optionally initializes it.
      * </p>
      *
      * @param type the fully qualified Processor class name
      * @param id the unique ID of the Processor
-     * @param firstTimeAdded whether or not this is the first time this
-     * Processor is added to the graph. If {@code true}, will invoke methods
-     * annotated with the {@link OnAdded} annotation.
+     * @param firstTimeAdded whether or not this is the first time this Processor is added to the graph. If {@code true}, will invoke methods annotated with the {@link OnAdded} annotation.
      * @return new processor node
      * @throws NullPointerException if either arg is null
-     * @throws ProcessorInstantiationException if the processor cannot be
-     * instantiated for any reason
+     * @throws ProcessorInstantiationException if the processor cannot be instantiated for any reason
      */
     @SuppressWarnings("deprecation")
     public ProcessorNode createProcessor(final String type, String id, final boolean firstTimeAdded) throws ProcessorInstantiationException {
@@ -874,8 +862,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * @return the ExtensionManager used for instantiating Processors,
-     * Prioritizers, etc.
+     * @return the ExtensionManager used for instantiating Processors, Prioritizers, etc.
      */
     public ExtensionManager getExtensionManager() {
         return extensionManager;
@@ -902,15 +889,13 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Creates a Port to use as an Input Port for the root Process Group, which
-     * is used for Site-to-Site communications
+     * Creates a Port to use as an Input Port for the root Process Group, which is used for Site-to-Site communications
      *
      * @param id port id
      * @param name port name
      * @return new port
      * @throws NullPointerException if the ID or name is not unique
-     * @throws IllegalStateException if an Input Port already exists with the
-     * same name or id.
+     * @throws IllegalStateException if an Input Port already exists with the same name or id.
      */
     public Port createRemoteInputPort(String id, String name) {
         id = requireNonNull(id).intern();
@@ -921,16 +906,13 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Creates a Port to use as an Output Port for the root Process Group, which
-     * is used for Site-to-Site communications and will queue flow files waiting
-     * to be delivered to remote instances
+     * Creates a Port to use as an Output Port for the root Process Group, which is used for Site-to-Site communications and will queue flow files waiting to be delivered to remote instances
      *
      * @param id port id
      * @param name port name
      * @return new port
      * @throws NullPointerException if the ID or name is not unique
-     * @throws IllegalStateException if an Input Port already exists with the
-     * same name or id.
+     * @throws IllegalStateException if an Input Port already exists with the same name or id.
      */
     public Port createRemoteOutputPort(String id, String name) {
         id = requireNonNull(id).intern();
@@ -941,8 +923,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Creates a new Remote Process Group with the given ID that points to the
-     * given URI
+     * Creates a new Remote Process Group with the given ID that points to the given URI
      *
      * @param id group id
      * @param uri group uri
@@ -955,8 +936,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Verifies that no output port exists with the given id or name. If this
-     * does not hold true, throws an IllegalStateException
+     * Verifies that no output port exists with the given id or name. If this does not hold true, throws an IllegalStateException
      *
      * @param id port identifier
      * @throws IllegalStateException port already exists
@@ -973,8 +953,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * @return the name of this controller, which is also the name of the Root
-     * Group.
+     * @return the name of this controller, which is also the name of the Root Group.
      */
     public String getName() {
         readLock.lock();
@@ -986,8 +965,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Sets the name for the Root Group, which also changes the name for the
-     * controller.
+     * Sets the name for the Root Group, which also changes the name for the controller.
      *
      * @param name of root group
      */
@@ -1001,8 +979,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * @return the comments of this controller, which is also the comment of the
-     * Root Group
+     * @return the comments of this controller, which is also the comment of the Root Group
      */
     public String getComments() {
         readLock.lock();
@@ -1016,8 +993,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     /**
      * Sets the comments
      *
-     * @param comments for the Root Group, which also changes the comment for
-     * the controller
+     * @param comments for the Root Group, which also changes the comment for the controller
      */
     public void setComments(final String comments) {
         readLock.lock();
@@ -1029,8 +1005,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * @return <code>true</code> if the scheduling engine for this controller
-     * has been terminated.
+     * @return <code>true</code> if the scheduling engine for this controller has been terminated.
      */
     public boolean isTerminated() {
         this.readLock.lock();
@@ -1042,16 +1017,12 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Triggers the controller to begin shutdown, stopping all processors and
-     * terminating the scheduling engine. After calling this method, the
-     * {@link #isTerminated()} method will indicate whether or not the shutdown
-     * has finished.
+     * Triggers the controller to begin shutdown, stopping all processors and terminating the scheduling engine. After calling this method, the {@link #isTerminated()} method will indicate whether or
+     * not the shutdown has finished.
      *
-     * @param kill if <code>true</code>, attempts to stop all active threads,
-     * but makes no guarantee that this will happen
+     * @param kill if <code>true</code>, attempts to stop all active threads, but makes no guarantee that this will happen
      *
-     * @throws IllegalStateException if the controller is already stopped or
-     * currently in the processor of stopping
+     * @throws IllegalStateException if the controller is already stopped or currently in the process of stopping
      */
     public void shutdown(final boolean kill) {
         this.shutdown = true;
@@ -1150,8 +1121,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
      *
      * @param serializer serializer
      * @param os stream
-     * @throws FlowSerializationException if serialization of the flow fails for
-     * any reason
+     * @throws FlowSerializationException if serialization of the flow fails for any reason
      */
     public void serialize(final FlowSerializer serializer, final OutputStream os) throws FlowSerializationException {
         readLock.lock();
@@ -1165,21 +1135,14 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     /**
      * Synchronizes this controller with the proposed flow.
      *
-     * For more details, see
-     * {@link FlowSynchronizer#sync(FlowController, DataFlow)}.
+     * For more details, see {@link FlowSynchronizer#sync(FlowController, DataFlow)}.
      *
      * @param synchronizer synchronizer
-     * @param dataFlow the flow to load the controller with. If the flow is null
-     * or zero length, then the controller must not have a flow or else an
-     * UninheritableFlowException will be thrown.
+     * @param dataFlow the flow to load the controller with. If the flow is null or zero length, then the controller must not have a flow or else an UninheritableFlowException will be thrown.
      *
-     * @throws FlowSerializationException if proposed flow is not a valid flow
-     * configuration file
-     * @throws UninheritableFlowException if the proposed flow cannot be loaded
-     * by the controller because in doing so would risk orphaning flow files
-     * @throws FlowSynchronizationException if updates to the controller failed.
-     * If this exception is thrown, then the controller should be considered
-     * unsafe to be used
+     * @throws FlowSerializationException if proposed flow is not a valid flow configuration file
+     * @throws UninheritableFlowException if the proposed flow cannot be loaded by the controller because doing so would risk orphaning flow files
+     * @throws FlowSynchronizationException if updates to the controller failed. If this exception is thrown, then the controller should be considered unsafe to be used
      */
     public void synchronize(final FlowSynchronizer synchronizer, final DataFlow dataFlow)
             throws FlowSerializationException, FlowSynchronizationException, UninheritableFlowException {
@@ -1194,8 +1157,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * @return the currently configured maximum number of threads that can be
-     * used for executing processors at any given time.
+     * @return the currently configured maximum number of threads that can be used for executing processors at any given time.
      */
     public int getMaxTimerDrivenThreadCount() {
         return maxTimerDrivenThreads.get();
@@ -1225,8 +1187,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Updates the number of threads that can be simultaneously used for
-     * executing processors.
+     * Updates the number of threads that can be simultaneously used for executing processors.
      *
      * @param maxThreadCount
      *
@@ -1261,8 +1222,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
      * @param group the ProcessGroup that is to become the new Root Group
      *
      * @throws IllegalArgumentException if the ProcessGroup has a parent
-     * @throws IllegalStateException if the FlowController does not know about
-     * the given process group
+     * @throws IllegalStateException if the FlowController does not know about the given process group
      */
     void setRootGroup(final ProcessGroup group) {
         if (requireNonNull(group).getParent() != null) {
@@ -1293,17 +1253,13 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     // ProcessGroup access
     //
     /**
-     * Updates the process group corresponding to the specified DTO. Any field
-     * in DTO that is <code>null</code> (with the exception of the required ID)
-     * will be ignored.
+     * Updates the process group corresponding to the specified DTO. Any field in DTO that is <code>null</code> (with the exception of the required ID) will be ignored.
      *
      * @param dto group
      * @throws ProcessorInstantiationException
      *
-     * @throws IllegalStateException if no process group can be found with the
-     * ID of DTO or with the ID of the DTO's parentGroupId, if the template ID
-     * specified is invalid, or if the DTO's Parent Group ID changes but the
-     * parent group has incoming or outgoing connections
+     * @throws IllegalStateException if no process group can be found with the ID of DTO or with the ID of the DTO's parentGroupId, if the template ID specified is invalid, or if the DTO's Parent
+     * Group ID changes but the parent group has incoming or outgoing connections
      *
      * @throws NullPointerException if the DTO or its ID is null
      */
@@ -1329,17 +1285,13 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     // Template access
     //
     /**
-     * Adds a template to this controller. The contents of this template must be
-     * part of the current flow. This is going create a template based on a
-     * snippet of this flow.
+     * Adds a template to this controller. The contents of this template must be part of the current flow. This is going to create a template based on a snippet of this flow.
      *
      * @param dto template
      * @return a copy of the given DTO
      * @throws IOException if an I/O error occurs when persisting the Template
      * @throws NullPointerException if the DTO is null
-     * @throws IllegalArgumentException if does not contain all required
-     * information, such as the template name or a processor's configuration
-     * element
+     * @throws IllegalArgumentException if the DTO does not contain all required information, such as the template name or a processor's configuration element
      */
     public Template addTemplate(final TemplateDTO dto) throws IOException {
         return templateManager.addTemplate(dto);
@@ -1355,8 +1307,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Imports the specified template into this controller. The contents of this
-     * template may have come from another NiFi instance.
+     * Imports the specified template into this controller. The contents of this template may have come from another NiFi instance.
      *
      * @param dto dto
      * @return template
@@ -1368,8 +1319,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
 
     /**
      * @param id identifier
-     * @return the template with the given ID, or <code>null</code> if no
-     * template exists with the given ID
+     * @return the template with the given ID, or <code>null</code> if no template exists with the given ID
      */
     public Template getTemplate(final String id) {
         return templateManager.getTemplate(id);
@@ -1406,21 +1356,15 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     // Snippet
     //
     /**
-     * Creates an instance of the given snippet and adds the components to the
-     * given group
+     * Creates an instance of the given snippet and adds the components to the given group
      *
      * @param group group
      * @param dto dto
      *
      * @throws NullPointerException if either argument is null
-     * @throws IllegalStateException if the snippet is not valid because a
-     * component in the snippet has an ID that is not unique to this flow, or
-     * because it shares an Input Port or Output Port at the root level whose
-     * name already exists in the given ProcessGroup, or because the Template
-     * contains a Processor or a Prioritizer whose class is not valid within
-     * this instance of NiFi.
-     * @throws ProcessorInstantiationException if unable to instantiate a
-     * processor
+     * @throws IllegalStateException if the snippet is not valid because a component in the snippet has an ID that is not unique to this flow, or because it shares an Input Port or Output Port at the
+     * root level whose name already exists in the given ProcessGroup, or because the Template contains a Processor or a Prioritizer whose class is not valid within this instance of NiFi.
+     * @throws ProcessorInstantiationException if unable to instantiate a processor
      */
     public void instantiateSnippet(final ProcessGroup group, final FlowSnippetDTO dto) throws ProcessorInstantiationException {
         writeLock.lock();
@@ -1732,8 +1676,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Returns the parent of the specified Connectable. This only considers this
-     * group and any direct child sub groups.
+     * Returns the parent of the specified Connectable. This only considers this group and any direct child sub groups.
      *
      * @param parentGroupId group id
      * @return parent group
@@ -1751,20 +1694,16 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
      * Verifies that the given DTO is valid, according to the following:
      *
      * <ul>
-     * <li>None of the ID's in any component of the DTO can be used in this
-     * flow.</li>
-     * <li>The ProcessGroup to which the template's contents will be added must
-     * not contain any InputPort or OutputPort with the same name as one of the
-     * corresponding components in the root level of the template.</li>
+     * <li>None of the ID's in any component of the DTO can be used in this flow.</li>
+     * <li>The ProcessGroup to which the template's contents will be added must not contain any InputPort or OutputPort with the same name as one of the corresponding components in the root level of
+     * the template.</li>
      * <li>All Processors' classes must exist in this instance.</li>
      * <li>All Flow File Prioritizers' classes must exist in this instance.</li>
      * </ul>
      * </p>
      *
      * <p>
-     * If any of the above statements does not hold true, an
-     * {@link IllegalStateException} or a
-     * {@link ProcessorInstantiationException} will be thrown.
+     * If any of the above statements does not hold true, an {@link IllegalStateException} or a {@link ProcessorInstantiationException} will be thrown.
      * </p>
      *
      * @param group group
@@ -1875,8 +1814,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     // Processor access
     //
     /**
-     * Indicates whether or not the two ID's point to the same ProcessGroup. If
-     * either id is null, will return <code>false</code>.
+     * Indicates whether or not the two ID's point to the same ProcessGroup. If either id is null, will return <code>false</code>.
      *
      * @param id1 group id
      * @param id2 other group id
@@ -2846,8 +2784,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     // Clustering methods
     //
     /**
-     * Starts heartbeating to the cluster. May only be called if the instance
-     * was constructed for a clustered environment.
+     * Starts heartbeating to the cluster. May only be called if the instance was constructed for a clustered environment.
      *
      * @throws IllegalStateException if not configured for clustering
      */
@@ -2874,31 +2811,23 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Notifies controller that the sending of heartbeats should be temporarily
-     * suspended. This method does not cancel any background tasks as does
-     * {@link #stopHeartbeating()} and does not require any lock on the
-     * FlowController. Background tasks will still generate heartbeat messages
-     * and any background task currently in the process of sending a Heartbeat
-     * to the cluster will continue.
+     * Notifies controller that the sending of heartbeats should be temporarily suspended. This method does not cancel any background tasks as does {@link #stopHeartbeating()} and does not require any
+     * lock on the FlowController. Background tasks will still generate heartbeat messages and any background task currently in the process of sending a Heartbeat to the cluster will continue.
      */
     public void suspendHeartbeats() {
         heartbeatsSuspended.set(true);
     }
 
     /**
-     * Notifies controller that the sending of heartbeats should be re-enabled.
-     * This method does not submit any background tasks to take affect as does
-     * {@link #startHeartbeating()} and does not require any lock on the
-     * FlowController.
+     * Notifies controller that the sending of heartbeats should be re-enabled. This method does not submit any background tasks to take effect as does {@link #startHeartbeating()} and does not
+     * require any lock on the FlowController.
      */
     public void resumeHeartbeats() {
         heartbeatsSuspended.set(false);
     }
 
     /**
-     * Stops heartbeating to the cluster. May only be called if the instance was
-     * constructed for a clustered environment. If the controller was not
-     * heartbeating, then this method has no effect.
+     * Stops heartbeating to the cluster. May only be called if the instance was constructed for a clustered environment. If the controller was not heartbeating, then this method has no effect.
      *
      * @throws IllegalStateException if not clustered
      */
@@ -2985,9 +2914,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * @return true if this instance is clustered; false otherwise. Clustered
-     * means that a node is either connected or trying to connect to the
-     * cluster.
+     * @return true if this instance is clustered; false otherwise. Clustered means that a node is either connected or trying to connect to the cluster.
      */
     public boolean isClustered() {
         readLock.lock();
@@ -2999,10 +2926,8 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * @return the DN of the Cluster Manager that we are currently connected to,
-     * if available. This will return null if the instance is not clustered or
-     * if the instance is clustered but the NCM's DN is not available - for
-     * instance, if cluster communications are not secure
+     * @return the DN of the Cluster Manager that we are currently connected to, if available. This will return null if the instance is not clustered or if the instance is clustered but the NCM's DN
+     * is not available - for instance, if cluster communications are not secure
      */
     public String getClusterManagerDN() {
         readLock.lock();
@@ -3014,24 +2939,20 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * Sets whether this instance is clustered. Clustered means that a node is
-     * either connected or trying to connect to the cluster.
+     * Sets whether this instance is clustered. Clustered means that a node is either connected or trying to connect to the cluster.
      *
      * @param clustered true if clustered
-     * @param clusterInstanceId if clustered is true, indicates the InstanceID
-     * of the Cluster Manager
+     * @param clusterInstanceId if clustered is true, indicates the InstanceID of the Cluster Manager
      */
     public void setClustered(final boolean clustered, final String clusterInstanceId) {
         setClustered(clustered, clusterInstanceId, null);
     }
 
     /**
-     * Sets whether this instance is clustered. Clustered means that a node is
-     * either connected or trying to connect to the cluster.
+     * Sets whether this instance is clustered. Clustered means that a node is either connected or trying to connect to the cluster.
      *
      * @param clustered true if clustered
-     * @param clusterInstanceId if clustered is true, indicates the InstanceID
-     * of the Cluster Manager
+     * @param clusterInstanceId if clustered is true, indicates the InstanceID of the Cluster Manager
      * @param clusterManagerDn the DN of the NCM
      */
     public void setClustered(final boolean clustered, final String clusterInstanceId, final String clusterManagerDn) {
@@ -3077,8 +2998,7 @@ public class FlowController implements EventAccess, ControllerServiceProvider, R
     }
 
     /**
-     * @return true if this instance is the primary node in the cluster; false
-     * otherwise
+     * @return true if this instance is the primary node in the cluster; false otherwise
      */
     public boolean isPrimary() {
         rwLock.readLock().lock();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializationException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializationException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializationException.java
index f1ee760..444b9e5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializationException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializationException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.controller;
 
 /**
- * Represents the exceptional case when flow configuration is malformed and
- * therefore, cannot be serialized or deserialized.
+ * Represents the exceptional case when flow configuration is malformed and therefore, cannot be serialized or deserialized.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializer.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializer.java
index 331b26c..0528674 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializer.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSerializer.java
@@ -19,8 +19,7 @@ package org.apache.nifi.controller;
 import java.io.OutputStream;
 
 /**
- * Serializes the flow configuration of a controller instance to an output
- * stream.
+ * Serializes the flow configuration of a controller instance to an output stream.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizationException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizationException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizationException.java
index 706ac46..68673b4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizationException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizationException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.controller;
 
 /**
- * Represents the exceptional case when a controller managing an existing flow
- * fails to fully load a different flow.
+ * Represents the exceptional case when a controller managing an existing flow fails to fully load a different flow.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizer.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizer.java
index f6889fe..275f816 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizer.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowSynchronizer.java
@@ -25,27 +25,18 @@ import org.apache.nifi.encrypt.StringEncryptor;
 public interface FlowSynchronizer {
 
     /**
-     * Synchronizes the given controller with the given flow configuration. If
-     * loading the proposed flow configuration would cause the controller to
-     * orphan flow files, then an UninheritableFlowException is thrown.
+     * Synchronizes the given controller with the given flow configuration. If loading the proposed flow configuration would cause the controller to orphan flow files, then an
+     * UninheritableFlowException is thrown.
      *
-     * If the FlowSynchronizationException is thrown, then the controller may
-     * have changed some of its state and should no longer be used.
+     * If the FlowSynchronizationException is thrown, then the controller may have changed some of its state and should no longer be used.
      *
      * @param controller the flow controller
-     * @param dataFlow the flow to load the controller with. If the flow is null
-     * or zero length, then the controller must not have a flow or else an
-     * UninheritableFlowException will be thrown.
-     * @param encryptor used for the encryption/decryption of sensitive property
-     * values
+     * @param dataFlow the flow to load the controller with. If the flow is null or zero length, then the controller must not have a flow or else an UninheritableFlowException will be thrown.
+     * @param encryptor used for the encryption/decryption of sensitive property values
      *
-     * @throws FlowSerializationException if proposed flow is not a valid flow
-     * configuration file
-     * @throws UninheritableFlowException if the proposed flow cannot be loaded
-     * by the controller because in doing so would risk orphaning flow files
-     * @throws FlowSynchronizationException if updates to the controller failed.
-     * If this exception is thrown, then the controller should be considered
-     * unsafe to be used
+     * @throws FlowSerializationException if proposed flow is not a valid flow configuration file
+     * @throws UninheritableFlowException if the proposed flow cannot be loaded by the controller because doing so would risk orphaning flow files
+     * @throws FlowSynchronizationException if updates to the controller failed. If this exception is thrown, then the controller should be considered unsafe to be used
      */
     void sync(FlowController controller, DataFlow dataFlow, StringEncryptor encryptor)
             throws FlowSerializationException, UninheritableFlowException, FlowSynchronizationException;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowUnmarshaller.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowUnmarshaller.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowUnmarshaller.java
index 7cc3039..cddb798 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowUnmarshaller.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/FlowUnmarshaller.java
@@ -38,9 +38,7 @@ import org.xml.sax.SAXException;
 public class FlowUnmarshaller {
 
     /**
-     * Interprets the given byte array as an XML document that conforms to the
-     * Flow Configuration schema and returns a FlowSnippetDTO representing the
-     * flow
+     * Interprets the given byte array as an XML document that conforms to the Flow Configuration schema and returns a FlowSnippetDTO representing the flow
      *
      * @param flowContents contents
      * @param encryptor encryptor

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardFlowSynchronizer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardFlowSynchronizer.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardFlowSynchronizer.java
index b66bedc..5448174 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardFlowSynchronizer.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardFlowSynchronizer.java
@@ -1030,15 +1030,13 @@ public class StandardFlowSynchronizer implements FlowSynchronizer {
     }
 
     /**
-     * Returns true if the given controller can inherit the proposed flow
-     * without orphaning flow files.
+     * Returns true if the given controller can inherit the proposed flow without orphaning flow files.
      *
      * @param existingFlow flow
      * @param controller the running controller
      * @param proposedFlow the flow to inherit
      *
-     * @return null if the controller can inherit the specified flow, an
-     * explanation of why it cannot be inherited otherwise
+     * @return null if the controller can inherit the specified flow, an explanation of why it cannot be inherited otherwise
      *
      * @throws FingerprintException if flow fingerprints could not be generated
      */
@@ -1080,14 +1078,12 @@ public class StandardFlowSynchronizer implements FlowSynchronizer {
     }
 
     /**
-     * Returns true if the given controller can inherit the proposed flow
-     * without orphaning flow files.
+     * Returns true if the given controller can inherit the proposed flow without orphaning flow files.
      *
      * @param existingFlow flow
      * @param proposedFlow the flow to inherit
      *
-     * @return null if the controller can inherit the specified flow, an
-     * explanation of why it cannot be inherited otherwise
+     * @return null if the controller can inherit the specified flow, an explanation of why it cannot be inherited otherwise
      *
      * @throws FingerprintException if flow fingerprints could not be generated
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index eb7ec83..7820b7b 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -67,10 +67,8 @@ import org.quartz.CronExpression;
 import org.slf4j.LoggerFactory;
 
 /**
- * ProcessorNode provides thread-safe access to a FlowFileProcessor as it exists
- * within a controlled flow. This node keeps track of the processor, its
- * scheduling information and its relationships to other processors and whatever
- * scheduled futures exist for it. Must be thread safe.
+ * ProcessorNode provides thread-safe access to a FlowFileProcessor as it exists within a controlled flow. This node keeps track of the processor, its scheduling information and its relationships to
+ * other processors and whatever scheduled futures exist for it. Must be thread safe.
  *
  * @author none
  */
@@ -171,8 +169,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * Provides and opportunity to retain information about this particular
-     * processor instance
+     * Provides an opportunity to retain information about this particular processor instance
      *
      * @param comments new comments
      */
@@ -222,8 +219,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return if true flow file content generated by this processor is
-     * considered loss tolerant
+     * @return if true flow file content generated by this processor is considered loss tolerant
      */
     @Override
     public boolean isLossTolerant() {
@@ -236,8 +232,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return true if the processor has the {@link TriggerWhenEmpty}
-     * annotation, false otherwise.
+     * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
      */
     @Override
     public boolean isTriggerWhenEmpty() {
@@ -245,8 +240,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return true if the processor has the {@link SideEffectFree} annotation,
-     * false otherwise.
+     * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
      */
     @Override
     public boolean isSideEffectFree() {
@@ -259,8 +253,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return true if the processor has the
-     * {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
+     * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
      */
     @Override
     public boolean isTriggerWhenAnyDestinationAvailable() {
@@ -268,8 +261,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * Indicates whether flow file content made by this processor must be
-     * persisted
+     * Indicates whether flow file content made by this processor must be persisted
      *
      * @param lossTolerant tolerant
      */
@@ -332,8 +324,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return an unmodifiable Set that contains all of the
-     * ProcessorRelationship objects that are configured to be auto-terminated
+     * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
      */
     @Override
     public Set<Relationship> getAutoTerminatedRelationships() {
@@ -350,8 +341,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return the value of the processor's {@link CapabilityDescription}
-     * annotation, if one exists, else <code>null</code>.
+     * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
      */
     @SuppressWarnings("deprecation")
     public String getProcessorDescription() {
@@ -384,11 +374,8 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @param timeUnit determines the unit of time to represent the scheduling
-     * period. If null will be reported in units of
-     * {@link #DEFAULT_SCHEDULING_TIME_UNIT}
-     * @return the schedule period that should elapse before subsequent cycles
-     * of this processor's tasks
+     * @param timeUnit determines the unit of time to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
+     * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
      */
     @Override
     public long getSchedulingPeriod(final TimeUnit timeUnit) {
@@ -410,8 +397,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
      *
      * @param schedulingStrategy strategy
      *
-     * @throws IllegalArgumentException if the SchedulingStrategy is not not
-     * applicable for this Processor
+     * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
      */
     @Override
     public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
@@ -538,10 +524,8 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * Causes the processor not to be scheduled for some period of time. This
-     * duration can be obtained and set via the
-     * {@link #getYieldPeriod(TimeUnit)} and
-     * {@link #setYieldPeriod(long, TimeUnit)} methods.
+     * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(long, TimeUnit)}
+     * methods.
      */
     @Override
     public void yield() {
@@ -561,8 +545,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return the number of milliseconds since Epoch at which time this
-     * processor is to once again be scheduled.
+     * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
      */
     @Override
     public long getYieldExpiration() {
@@ -597,11 +580,9 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * Determines the number of concurrent tasks that may be running for this
-     * processor.
+     * Determines the number of concurrent tasks that may be running for this processor.
      *
-     * @param taskCount a number of concurrent tasks this processor may have
-     * running
+     * @param taskCount a number of concurrent tasks this processor may have running
      * @throws IllegalArgumentException if the given value is less than 1
      */
     @Override
@@ -628,8 +609,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return the number of tasks that may execute concurrently for this
-     * processor
+     * @return the number of tasks that may execute concurrently for this processor
      */
     @Override
     public int getMaxConcurrentTasks() {
@@ -857,8 +837,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
 
     /**
      * @param relationshipName name
-     * @return the relationship for this nodes processor for the given name or
-     * creates a new relationship for the given name
+     * @return the relationship for this node's processor for the given name or creates a new relationship for the given name
      */
     @Override
     public Relationship getRelationship(final String relationshipName) {
@@ -885,8 +864,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     /**
-     * @return the Set of destination processors for all relationships excluding
-     * any destinations that are this processor itself (self-loops)
+     * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
      */
     public Set<Connectable> getDestinations() {
         final Set<Connectable> nonSelfDestinations = new HashSet<>();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
index 6a11a33..7b8e173 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
@@ -98,18 +98,14 @@ public class TemplateManager {
     }
 
     /**
-     * Adds a template to this manager. The contents of this template must be
-     * part of the current flow. This is going create a template based on a
-     * snippet of this flow. Any sensitive properties in the TemplateDTO will be
-     * removed.
+     * Adds a template to this manager. The contents of this template must be part of the current flow. This is going to create a template based on a snippet of this flow. Any sensitive properties in the
+     * TemplateDTO will be removed.
      *
      * @param dto dto
      * @return a copy of the given DTO
      * @throws IOException if an I/O error occurs when persisting the Template
      * @throws NullPointerException if the DTO is null
-     * @throws IllegalArgumentException if does not contain all required
-     * information, such as the template name or a processor's configuration
-     * element
+     * @throws IllegalArgumentException if the DTO does not contain all required information, such as the template name or a processor's configuration element
      */
     public Template addTemplate(final TemplateDTO dto) throws IOException {
         scrubTemplate(dto.getSnippet());
@@ -246,8 +242,7 @@ public class TemplateManager {
     }
 
     /**
-     * Scrubs the template prior to persisting in order to remove fields that
-     * shouldn't be included or are unnecessary.
+     * Scrubs the template prior to persisting in order to remove fields that shouldn't be included or are unnecessary.
      *
      * @param snippet snippet
      */
@@ -294,8 +289,7 @@ public class TemplateManager {
     }
 
     /**
-     * Scrubs processors prior to saving. This includes removing sensitive
-     * properties, validation errors, property descriptors, etc.
+     * Scrubs processors prior to saving. This includes removing sensitive properties, validation errors, property descriptors, etc.
      *
      * @param processors procs
      */
@@ -348,8 +342,7 @@ public class TemplateManager {
     }
 
     /**
-     * Scrubs connections prior to saving. This includes removing available
-     * relationships.
+     * Scrubs connections prior to saving. This includes removing available relationships.
      *
      * @param connections conns
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/UninheritableFlowException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/UninheritableFlowException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/UninheritableFlowException.java
index ac6fc5f..2f9f0f8 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/UninheritableFlowException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/UninheritableFlowException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.controller;
 
 /**
- * Represents the exceptional case when a controller is to be loaded with a flow
- * that is fundamentally different than its existing flow.
+ * Represents the exceptional case when a controller is to be loaded with a flow that is fundamentally different than its existing flow.
  *
  * @author unattributed
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/FileSystemRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/FileSystemRepository.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/FileSystemRepository.java
index d69b417..e212bdb 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/FileSystemRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/FileSystemRepository.java
@@ -1280,9 +1280,7 @@ public class FileSystemRepository implements ContentRepository {
         }
 
         /**
-         * @return {@code true} if wait is required to create claims against
-         * this Container, based on whether or not the container has reached its
-         * back pressure threshold
+         * @return {@code true} if wait is required to create claims against this Container, based on whether or not the container has reached its back pressure threshold
          */
         public boolean isWaitRequired() {
             if (!archiveEnabled) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProcessContext.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProcessContext.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProcessContext.java
index 7502641..1937d0d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProcessContext.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProcessContext.java
@@ -77,9 +77,7 @@ public class ProcessContext {
     }
 
     /**
-     * @return an unmodifiable list containing a copy of all incoming
-     * connections for the processor from which FlowFiles are allowed to be
-     * pulled
+     * @return an unmodifiable list containing a copy of all incoming connections for the processor from which FlowFiles are allowed to be pulled
      */
     List<Connection> getPollableConnections() {
         if (pollFromSelfLoopsOnly()) {
@@ -106,9 +104,7 @@ public class ProcessContext {
     }
 
     /**
-     * @return true if we are allowed to take FlowFiles only from self-loops.
-     * This is the case when no Relationships are available except for
-     * self-looping Connections
+     * @return true if we are allowed to take FlowFiles only from self-loops. This is the case when no Relationships are available except for self-looping Connections
      */
     private boolean pollFromSelfLoopsOnly() {
         if (isTriggerWhenAnyDestinationAvailable()) {
@@ -217,14 +213,10 @@ public class ProcessContext {
     }
 
     /**
-     * A Relationship is said to be Available if and only if all Connections for
-     * that Relationship are either self-loops or have non-full queues.
+     * A Relationship is said to be Available if and only if all Connections for that Relationship are either self-loops or have non-full queues.
      *
-     * @param requiredNumber minimum number of relationships that must have
-     * availability
-     * @return Checks if at least <code>requiredNumber</code> of
-     * Relationationships are "available." If so, returns <code>true</code>,
-     * otherwise returns <code>false</code>
+     * @param requiredNumber minimum number of relationships that must have availability
+     * @return Checks if at least <code>requiredNumber</code> of Relationships are "available." If so, returns <code>true</code>, otherwise returns <code>false</code>
      */
     public boolean isRelationshipAvailabilitySatisfied(final int requiredNumber) {
         int unavailable = 0;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProvenanceEventEnricher.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProvenanceEventEnricher.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProvenanceEventEnricher.java
index db098fc..323bfb0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProvenanceEventEnricher.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/ProvenanceEventEnricher.java
@@ -22,8 +22,7 @@ import org.apache.nifi.provenance.ProvenanceEventRecord;
 public interface ProvenanceEventEnricher {
 
     /**
-     * Returns a new Provenance event that has been updated to contain the
-     * original and updated FlowFile attributes and content claim information.
+     * Returns a new Provenance event that has been updated to contain the original and updated FlowFile attributes and content claim information.
      *
      * @param record record
      * @param flowFile flowfile

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/RepositoryPurgeException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/RepositoryPurgeException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/RepositoryPurgeException.java
index e668be3..710bb79 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/RepositoryPurgeException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/RepositoryPurgeException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.controller.repository;
 
 /**
- * This exception is thrown when a flow file repository was unable to be
- * properly purged.
+ * This exception is thrown when a flow file repository was unable to be properly purged.
  *
  *
  */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardFlowFileRecord.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardFlowFileRecord.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardFlowFileRecord.java
index 433c3d2..6bb5e35 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardFlowFileRecord.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardFlowFileRecord.java
@@ -37,9 +37,7 @@ import org.apache.commons.lang3.builder.ToStringStyle;
 
 /**
  * <p>
- * A flow file is a logical notion of an item in a flow with its associated
- * attributes and identity which can be used as a reference for its actual
- * content.</p>
+ * A flow file is a logical notion of an item in a flow with its associated attributes and identity which can be used as a reference for its actual content.</p>
  *
  * <b>Immutable - Thread Safe</b>
  *
@@ -127,8 +125,7 @@ public final class StandardFlowFileRecord implements FlowFile, FlowFileRecord {
     }
 
     /**
-     * Provides the natural ordering for FlowFile objects which is based on
-     * their identifier.
+     * Provides the natural ordering for FlowFile objects which is based on their identifier.
      *
      * @param other other
      * @return standard compare contract

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java
index 4827ab7..d3b0690 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProcessSession.java
@@ -81,9 +81,7 @@ import org.slf4j.LoggerFactory;
 
 /**
  * <p>
- * Provides a ProcessSession that ensures all accesses, changes and transfers
- * occur in an atomic manner for all FlowFiles including their contents and
- * attributes</p>
+ * Provides a ProcessSession that ensures all accesses, changes and transfers occur in an atomic manner for all FlowFiles including their contents and attributes</p>
  * <p>
  * NOT THREAD SAFE</p>
  * <p/>
@@ -770,10 +768,8 @@ public final class StandardProcessSession implements ProcessSession, ProvenanceE
     }
 
     /**
-     * Checks if the given event is a spurious FORK, meaning that the FORK has a
-     * single child and that child was removed in this session. This happens
-     * when a Processor calls #create(FlowFile) and then removes the created
-     * FlowFile.
+     * Checks if the given event is a spurious FORK, meaning that the FORK has a single child and that child was removed in this session. This happens when a Processor calls #create(FlowFile) and then
+     * removes the created FlowFile.
      *
      * @param event event
      * @return true if spurious fork
@@ -790,10 +786,8 @@ public final class StandardProcessSession implements ProcessSession, ProvenanceE
     }
 
     /**
-     * Checks if the given event is a spurious ROUTE, meaning that the ROUTE
-     * indicates that a FlowFile was routed to a relationship with only 1
-     * connection and that Connection is the Connection from which the FlowFile
-     * was pulled. I.e., the FlowFile was really routed nowhere.
+     * Checks if the given event is a spurious ROUTE, meaning that the ROUTE indicates that a FlowFile was routed to a relationship with only 1 connection and that Connection is the Connection from
+     * which the FlowFile was pulled. I.e., the FlowFile was really routed nowhere.
      *
      * @param event event
      * @param records records
@@ -2085,13 +2079,10 @@ public final class StandardProcessSession implements ProcessSession, ProvenanceE
     }
 
     /**
-     * Checks if the ContentClaim associated with this record should be removed,
-     * since the record is about to be updated to point to a new content claim.
-     * If so, removes the working claim.
+     * Checks if the ContentClaim associated with this record should be removed, since the record is about to be updated to point to a new content claim. If so, removes the working claim.
      *
-     * This happens if & only if the content of this FlowFile has been modified
-     * since it was last committed to the FlowFile repository, because this
-     * indicates that the content is no longer needed and should be cleaned up.
+     * This happens if & only if the content of this FlowFile has been modified since it was last committed to the FlowFile repository, because this indicates that the content is no longer needed and
+     * should be cleaned up.
      *
      * @param record record
      */
@@ -2155,8 +2146,7 @@ public final class StandardProcessSession implements ProcessSession, ProvenanceE
     }
 
     /**
-     * @return Indicates whether or not multiple FlowFiles should be merged into
-     * a single ContentClaim
+     * @return Indicates whether or not multiple FlowFiles should be merged into a single ContentClaim
      */
     private boolean isMergeContent() {
         if (writeRecursionLevel > 0) {
@@ -2573,8 +2563,7 @@ public final class StandardProcessSession implements ProcessSession, ProvenanceE
     }
 
     /**
-     * Returns the attributes that are common to every FlowFile given. The key
-     * and value must match exactly.
+     * Returns the attributes that are common to every FlowFile given. The key and value must match exactly.
      *
      * @param flowFileList a list of FlowFiles
      *
@@ -2629,8 +2618,7 @@ public final class StandardProcessSession implements ProcessSession, ProvenanceE
     }
 
     /**
-     * Callback interface used to poll a FlowFileQueue, in order to perform
-     * functional programming-type of polling a queue
+     * Callback interface used to poll a FlowFileQueue, in order to perform functional programming-style polling of a queue
      */
     private static interface QueuePoller {
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java
index 5e8bb3e..a55fb25 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardProvenanceReporter.java
@@ -67,10 +67,8 @@ public class StandardProvenanceReporter implements ProvenanceReporter {
     }
 
     /**
-     * Generates a Fork event for the given child and parents but does not
-     * register the event. This is useful so that a ProcessSession has the
-     * ability to de-dupe events, since one or more events may be created by the
-     * session itself, as well as by the Processor
+     * Generates a Fork event for the given child and parents but does not register the event. This is useful so that a ProcessSession has the ability to de-dupe events, since one or more events may
+     * be created by the session itself, as well as by the Processor
      *
      * @param parents parents
      * @param child child

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryRecord.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryRecord.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryRecord.java
index c965ed8..5fcb35a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryRecord.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryRecord.java
@@ -37,8 +37,7 @@ public class StandardRepositoryRecord implements RepositoryRecord {
     private final Map<String, String> originalAttributes;
 
     /**
-     * Creates a new record which has no original claim or flow file - it is
-     * entirely new
+     * Creates a new record which has no original claim or flow file - it is entirely new
      *
      * @param originalQueue queue
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryStatusReport.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryStatusReport.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryStatusReport.java
index 4e41de1..3e30059 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryStatusReport.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/StandardRepositoryStatusReport.java
@@ -21,8 +21,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Encapsulates the status of the processor data transfers as stored in the
- * repository.
+ * Encapsulates the status of the processor data transfers as stored in the repository.
  */
 public class StandardRepositoryStatusReport implements RepositoryStatusReport {
 
@@ -30,8 +29,7 @@ public class StandardRepositoryStatusReport implements RepositoryStatusReport {
     }
 
     /**
-     * Returns a map where the key is the processor ID and the value is the
-     * status entry for the processor.
+     * Returns a map where the key is the processor ID and the value is the status entry for the processor.
      *
      * @return a map of report entries
      */
@@ -41,11 +39,9 @@ public class StandardRepositoryStatusReport implements RepositoryStatusReport {
     }
 
     /**
-     * Returns a particular entry for a given processor ID. If the processor ID
-     * does not exist, then null is returned.
+     * Returns a particular entry for a given processor ID. If the processor ID does not exist, then null is returned.
      *
-     * @param componentId the ID of a component; that component may be a
-     * Processor, a Connection, a ProcessGroup, etc.
+     * @param componentId the ID of a component; that component may be a Processor, a Connection, a ProcessGroup, etc.
      *
      * @return a status entry
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e811929f/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileContentRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileContentRepository.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileContentRepository.java
index 65756f4..5971865 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileContentRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/repository/VolatileContentRepository.java
@@ -54,33 +54,24 @@ import org.slf4j.LoggerFactory;
 
 /**
  * <p>
- * An in-memory implementation of the {@link ContentRepository} interface. This
- * implementation stores FlowFile content in the Java heap and keeps track of
- * the number of bytes used. If the number of bytes used by FlowFile content
- * exceeds some threshold (configured via the
- * <code>nifi.volatile.content.repository.max.size</code> property in the NiFi
- * properties with a default of 100 MB), one of two situations will occur:
+ * An in-memory implementation of the {@link ContentRepository} interface. This implementation stores FlowFile content in the Java heap and keeps track of the number of bytes used. If the number of
+ * bytes used by FlowFile content exceeds some threshold (configured via the <code>nifi.volatile.content.repository.max.size</code> property in the NiFi properties with a default of 100 MB), one of
+ * two situations will occur:
  * </p>
  *
  * <ul>
- * <li><b>Backup Repository:</b> If a Backup Repository has been specified (via
- * the {@link #setBackupRepository(ContentRepository)} method), the content will
- * be stored in the backup repository and all access to the FlowFile content
- * will automatically and transparently be proxied to the backup repository.
+ * <li><b>Backup Repository:</b> If a Backup Repository has been specified (via the {@link #setBackupRepository(ContentRepository)} method), the content will be stored in the backup repository and all
+ * access to the FlowFile content will automatically and transparently be proxied to the backup repository.
  * </li>
  * <li>
- * <b>Without Backup Repository:</b> If no Backup Repository has been specified,
- * when the threshold is exceeded, an IOException will be thrown.
+ * <b>Without Backup Repository:</b> If no Backup Repository has been specified, when the threshold is exceeded, an IOException will be thrown.
  * </li>
  * </ul>
  *
  * <p>
- * When a Content Claim is created via the {@link #create(boolean)} method, if
- * the <code>lossTolerant</code> flag is set to <code>false</code>, the Backup
- * Repository will be used to create the Content Claim and any accesses to the
- * ContentClaim will be proxied to the Backup Repository. If the Backup
- * Repository has not been specified, attempting to create a non-loss-tolerant
- * ContentClaim will result in an {@link IllegalStateException} being thrown.
+ * When a Content Claim is created via the {@link #create(boolean)} method, if the <code>lossTolerant</code> flag is set to <code>false</code>, the Backup Repository will be used to create the Content
+ * Claim and any accesses to the ContentClaim will be proxied to the Backup Repository. If the Backup Repository has not been specified, attempting to create a non-loss-tolerant ContentClaim will
+ * result in an {@link IllegalStateException} being thrown.
  * </p>
  */
 public class VolatileContentRepository implements ContentRepository {
@@ -144,8 +135,7 @@ public class VolatileContentRepository implements ContentRepository {
     }
 
     /**
-     * Specifies a Backup Repository where data should be written if this
-     * Repository fills up
+     * Specifies a Backup Repository where data should be written if this Repository fills up
      *
      * @param backup repo backup
      */

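The overflow behaviour described in the class Javadoc above (spill to a backup repository if one is set, otherwise fail once the in-memory threshold is exceeded) reduces to a small sketch; the names below are placeholders, not the actual VolatileContentRepository implementation:

    import java.io.IOException;
    import java.util.concurrent.atomic.AtomicLong;

    public class VolatileStoreSketch {

        public interface BackupStore {                    // hypothetical backup abstraction
            void write(byte[] content) throws IOException;
        }

        private final long maxBytes = 100L * 1024 * 1024; // default threshold cited in the Javadoc
        private final AtomicLong bytesUsed = new AtomicLong();
        private final BackupStore backup;                  // null when no backup repository is configured

        public VolatileStoreSketch(final BackupStore backup) {
            this.backup = backup;
        }

        public void write(final byte[] content) throws IOException {
            final long newTotal = bytesUsed.addAndGet(content.length);
            if (newTotal > maxBytes) {
                bytesUsed.addAndGet(-content.length);      // this write will not be kept on the heap
                if (backup == null) {
                    throw new IOException("In-memory repository is full and no backup repository is configured");
                }
                backup.write(content);                     // transparently proxied to the backup repository
                return;
            }
            // ... keep the content in an in-memory structure keyed by its content claim ...
        }
    }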

[22/50] [abbrv] incubator-nifi git commit: NIFI-527: Code cleanup

Posted by mc...@apache.org.
NIFI-527: Code cleanup


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/3cd18b0b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/3cd18b0b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/3cd18b0b

Branch: refs/heads/NIFI-292
Commit: 3cd18b0babc5133e35a2771bc0d0acaf974c381f
Parents: 666de3d
Author: Mark Payne <ma...@hotmail.com>
Authored: Mon Apr 27 14:13:55 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Mon Apr 27 14:13:55 2015 -0400

----------------------------------------------------------------------
 .../nifi/provenance/IndexConfiguration.java     |  12 +-
 .../PersistentProvenanceRepository.java         | 612 +++++++-------
 .../provenance/RepositoryConfiguration.java     | 106 +--
 .../nifi/provenance/StandardRecordReader.java   | 246 +++---
 .../nifi/provenance/StandardRecordWriter.java   | 138 ++--
 .../provenance/expiration/ExpirationAction.java |   6 +-
 .../provenance/lucene/DeleteIndexAction.java    |  12 +-
 .../nifi/provenance/lucene/DocsReader.java      |  79 +-
 .../nifi/provenance/lucene/IndexManager.java    | 820 +++++++++----------
 .../nifi/provenance/lucene/IndexSearch.java     |  38 +-
 .../nifi/provenance/lucene/IndexingAction.java  | 119 +--
 .../nifi/provenance/lucene/LineageQuery.java    |   6 +-
 .../nifi/provenance/lucene/LuceneUtil.java      |  38 +-
 .../provenance/rollover/CompressionAction.java  |  59 --
 .../provenance/rollover/RolloverAction.java     |  35 -
 .../provenance/serialization/RecordReader.java  |  57 +-
 .../provenance/serialization/RecordReaders.java | 136 +--
 .../provenance/serialization/RecordWriter.java  |  23 +-
 .../provenance/serialization/RecordWriters.java |   8 +-
 .../nifi/provenance/toc/StandardTocReader.java  |  44 +-
 .../nifi/provenance/toc/StandardTocWriter.java  |  35 +-
 .../apache/nifi/provenance/toc/TocReader.java   |  20 +-
 .../org/apache/nifi/provenance/toc/TocUtil.java |  27 +-
 .../apache/nifi/provenance/toc/TocWriter.java   |  16 +-
 .../TestPersistentProvenanceRepository.java     | 118 +--
 .../TestStandardRecordReaderWriter.java         | 162 ++--
 .../org/apache/nifi/provenance/TestUtil.java    |   2 +-
 .../provenance/toc/TestStandardTocReader.java   |  20 +-
 .../provenance/toc/TestStandardTocWriter.java   |   4 +-
 29 files changed, 1391 insertions(+), 1607 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java
index a5474d5..3beab65 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java
@@ -92,7 +92,7 @@ public class IndexConfiguration {
             }
             return firstRecord.getEventTime();
         } catch (final FileNotFoundException | EOFException fnf) {
-            return null;	// file no longer exists or there's no record in this file
+            return null; // file no longer exists or there's no record in this file
         } catch (final IOException ioe) {
             logger.warn("Failed to read first entry in file {} due to {}", provenanceLogFile, ioe.toString());
             logger.warn("", ioe);
@@ -201,7 +201,8 @@ public class IndexConfiguration {
      * desired
      * @param endTime the end time of the query for which the indices are
      * desired
-     * @return
+     * @return the index directories that are applicable only for the given time
+     * span (times inclusive).
      */
     public List<File> getIndexDirectories(final Long startTime, final Long endTime) {
         if (startTime == null && endTime == null) {
@@ -252,7 +253,8 @@ public class IndexConfiguration {
      *
      * @param provenanceLogFile the provenance log file for which the index
      * directories are desired
-     * @return
+     * @return the index directories that are applicable only for the given
+     * event log
      */
     public List<File> getIndexDirectories(final File provenanceLogFile) {
         final List<File> dirs = new ArrayList<>();
@@ -334,9 +336,7 @@ public class IndexConfiguration {
     }
 
     /**
-     * Returns the amount of disk space in bytes used by all of the indices
-     *
-     * @return
+     * @return the amount of disk space in bytes used by all of the indices
      */
     public long getIndexSize() {
         lock.lock();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
index 48cc164..fe89a5e 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
@@ -139,7 +139,6 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
 
     private final List<ExpirationAction> expirationActions = new ArrayList<>();
 
-    private final IndexingAction indexingAction;
     private final ConcurrentMap<String, AsyncQuerySubmission> querySubmissionMap = new ConcurrentHashMap<>();
     private final ConcurrentMap<String, AsyncLineageSubmission> lineageSubmissionMap = new ConcurrentHashMap<>();
 
@@ -151,7 +150,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
     private final AtomicBoolean initialized = new AtomicBoolean(false);
 
     private final AtomicBoolean repoDirty = new AtomicBoolean(false);
-    // we keep the last 1000 records on hand so that when the UI is opened and it asks for the last 1000 records we don't need to 
+    // we keep the last 1000 records on hand so that when the UI is opened and it asks for the last 1000 records we don't need to
     // read them. Since this is a very cheap operation to keep them, it's worth the tiny expense for the improved user experience.
     private final RingBuffer<ProvenanceEventRecord> latestRecords = new RingBuffer<>(1000);
     private EventReporter eventReporter;
@@ -184,13 +183,6 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
         this.indexManager = new IndexManager();
         this.alwaysSync = configuration.isAlwaysSync();
         this.rolloverCheckMillis = rolloverCheckMillis;
-        
-        final List<SearchableField> fields = configuration.getSearchableFields();
-        if (fields != null && !fields.isEmpty()) {
-            indexingAction = new IndexingAction(this, indexConfig);
-        } else {
-            indexingAction = null;
-        }
 
         scheduledExecService = Executors.newScheduledThreadPool(3, new NamedThreadFactory("Provenance Maintenance Thread"));
         queryExecService = Executors.newFixedThreadPool(configuration.getQueryThreadPoolSize(), new NamedThreadFactory("Provenance Query Thread"));
@@ -205,69 +197,69 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
 
     @Override
     public void initialize(final EventReporter eventReporter) throws IOException {
-    	writeLock.lock();
-    	try {
-	        if (initialized.getAndSet(true)) {
-	            return;
-	        }
-	
-	        this.eventReporter = eventReporter;
-	
-	        recover();
-	
-	        if (configuration.isAllowRollover()) {
-	            writers = createWriters(configuration, idGenerator.get());
-	        }
-	
-	        if (configuration.isAllowRollover()) {
-	            scheduledExecService.scheduleWithFixedDelay(new Runnable() {
-	                @Override
-	                public void run() {
-	                    // Check if we need to roll over
-	                    if (needToRollover()) {
-	                        // it appears that we do need to roll over. Obtain write lock so that we can do so, and then
-	                        // confirm that we still need to.
-	                        writeLock.lock();
-	                        try {
-	                            logger.debug("Obtained write lock to perform periodic rollover");
-	
-	                            if (needToRollover()) {
-	                                try {
-	                                    rollover(false);
-	                                } catch (final Exception e) {
-	                                    logger.error("Failed to roll over Provenance Event Log due to {}", e.toString());
-	                                    logger.error("", e);
-	                                }
-	                            }
-	                        } finally {
-	                            writeLock.unlock();
-	                        }
-	                    }
-	                }
-	            }, rolloverCheckMillis, rolloverCheckMillis, TimeUnit.MILLISECONDS);
-	
-	            scheduledExecService.scheduleWithFixedDelay(new RemoveExpiredQueryResults(), 30L, 3L, TimeUnit.SECONDS);
-	            scheduledExecService.scheduleWithFixedDelay(new Runnable() {
-	                @Override
-	                public void run() {
-	                    try {
-	                        purgeOldEvents();
-	                    } catch (final Exception e) {
-	                        logger.error("Failed to purge old events from Provenance Repo due to {}", e.toString());
-	                        if (logger.isDebugEnabled()) {
-	                            logger.error("", e);
-	                        }
-	                        eventReporter.reportEvent(Severity.ERROR, EVENT_CATEGORY, "Failed to purge old events from Provenance Repo due to " + e.toString());
-	                    }
-	                }
-	            }, 1L, 1L, TimeUnit.MINUTES);
-	
-	            expirationActions.add(new DeleteIndexAction(this, indexConfig, indexManager));
-	            expirationActions.add(new FileRemovalAction());
-	        }
-    	} finally {
-    		writeLock.unlock();
-    	}
+        writeLock.lock();
+        try {
+            if (initialized.getAndSet(true)) {
+                return;
+            }
+
+            this.eventReporter = eventReporter;
+
+            recover();
+
+            if (configuration.isAllowRollover()) {
+                writers = createWriters(configuration, idGenerator.get());
+            }
+
+            if (configuration.isAllowRollover()) {
+                scheduledExecService.scheduleWithFixedDelay(new Runnable() {
+                    @Override
+                    public void run() {
+                        // Check if we need to roll over
+                        if (needToRollover()) {
+                            // it appears that we do need to roll over. Obtain write lock so that we can do so, and then
+                            // confirm that we still need to.
+                            writeLock.lock();
+                            try {
+                                logger.debug("Obtained write lock to perform periodic rollover");
+
+                                if (needToRollover()) {
+                                    try {
+                                        rollover(false);
+                                    } catch (final Exception e) {
+                                        logger.error("Failed to roll over Provenance Event Log due to {}", e.toString());
+                                        logger.error("", e);
+                                    }
+                                }
+                            } finally {
+                                writeLock.unlock();
+                            }
+                        }
+                    }
+                }, rolloverCheckMillis, rolloverCheckMillis, TimeUnit.MILLISECONDS);
+
+                scheduledExecService.scheduleWithFixedDelay(new RemoveExpiredQueryResults(), 30L, 3L, TimeUnit.SECONDS);
+                scheduledExecService.scheduleWithFixedDelay(new Runnable() {
+                    @Override
+                    public void run() {
+                        try {
+                            purgeOldEvents();
+                        } catch (final Exception e) {
+                            logger.error("Failed to purge old events from Provenance Repo due to {}", e.toString());
+                            if (logger.isDebugEnabled()) {
+                                logger.error("", e);
+                            }
+                            eventReporter.reportEvent(Severity.ERROR, EVENT_CATEGORY, "Failed to purge old events from Provenance Repo due to " + e.toString());
+                        }
+                    }
+                }, 1L, 1L, TimeUnit.MINUTES);
+
+                expirationActions.add(new DeleteIndexAction(this, indexConfig, indexManager));
+                expirationActions.add(new FileRemovalAction());
+            }
+        } finally {
+            writeLock.unlock();
+        }
     }
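
The scheduled task above relies on a check/lock/re-check pattern: needToRollover() is evaluated cheaply outside the write lock, and the rollover only runs if the condition still holds once the lock is acquired. A self-contained sketch of that pattern follows; the needToRollover() and rollover() methods are placeholders standing in for the repository's real logic.

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    public class PeriodicRolloverSketch {
        private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
        private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

        public void start(final long checkMillis) {
            scheduler.scheduleWithFixedDelay(new Runnable() {
                @Override
                public void run() {
                    // Cheap check first, without holding the write lock.
                    if (!needToRollover()) {
                        return;
                    }
                    rwLock.writeLock().lock();
                    try {
                        // Re-check under the lock; another thread may already have rolled over.
                        if (needToRollover()) {
                            rollover();
                        }
                    } catch (final Exception e) {
                        e.printStackTrace(); // a real implementation would log and keep the scheduler alive
                    } finally {
                        rwLock.writeLock().unlock();
                    }
                }
            }, checkMillis, checkMillis, TimeUnit.MILLISECONDS);
        }

        private boolean needToRollover() { return false; } // placeholder condition
        private void rollover() { /* merge journals, swap writers, etc. */ }
    }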
 
     private static RepositoryConfiguration createRepositoryConfiguration() throws IOException {
@@ -489,28 +481,26 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                 maxIdFile = file;
             }
 
-            if (firstId > maxIndexedId && indexingAction != null && indexingAction.hasBeenPerformed(file)) {
+            if (firstId > maxIndexedId) {
                 maxIndexedId = firstId - 1;
             }
 
-            if (firstId < minIndexedId && indexingAction != null && indexingAction.hasBeenPerformed(file)) {
+            if (firstId < minIndexedId) {
                 minIndexedId = firstId;
             }
         }
 
         if (maxIdFile != null) {
-            final boolean lastFileIndexed = indexingAction == null ? false : indexingAction.hasBeenPerformed(maxIdFile);
-
             // Determine the max ID in the last file.
             try (final RecordReader reader = RecordReaders.newRecordReader(maxIdFile, getAllLogFiles())) {
-            	final long eventId = reader.getMaxEventId();
+                final long eventId = reader.getMaxEventId();
                 if (eventId > maxId) {
                     maxId = eventId;
                 }
 
                 // If the ID is greater than the max indexed id and this file was indexed, then
                 // update the max indexed id
-                if (eventId > maxIndexedId && lastFileIndexed) {
+                if (eventId > maxIndexedId) {
                     maxIndexedId = eventId;
                 }
             } catch (final IOException ioe) {
@@ -567,7 +557,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             // Read the records in the last file to find its max id
             if (greatestMinIdFile != null) {
                 try (final RecordReader recordReader = RecordReaders.newRecordReader(greatestMinIdFile, Collections.<Path>emptyList())) {
-                	maxId = recordReader.getMaxEventId();
+                    maxId = recordReader.getMaxEventId();
                 }
             }
 
@@ -604,11 +594,11 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             queryExecService.shutdownNow();
 
             indexManager.close();
-            
+
             if ( writers != null ) {
-	            for (final RecordWriter writer : writers) {
-	                writer.close();
-	            }
+                for (final RecordWriter writer : writers) {
+                    writer.close();
+                }
             }
         } finally {
             writeLock.unlock();
@@ -624,7 +614,8 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
         readLock.lock();
         try {
             if (repoDirty.get()) {
-                logger.debug("Cannot persist provenance record because there was an IOException last time a record persistence was attempted. Will not attempt to persist more records until the repo has been rolled over.");
+                logger.debug("Cannot persist provenance record because there was an IOException last time a record persistence was attempted. "
+                        + "Will not attempt to persist more records until the repo has been rolled over.");
                 return;
             }
 
@@ -670,7 +661,8 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             } catch (final IOException ioe) {
                 logger.error("Failed to persist Provenance Event due to {}. Will not attempt to write to the Provenance Repository again until the repository has rolled over.", ioe.toString());
                 logger.error("", ioe);
-                eventReporter.reportEvent(Severity.ERROR, EVENT_CATEGORY, "Failed to persist Provenance Event due to " + ioe.toString() + ". Will not attempt to write to the Provenance Repository again until the repository has rolled over");
+                eventReporter.reportEvent(Severity.ERROR, EVENT_CATEGORY, "Failed to persist Provenance Event due to " + ioe.toString() +
+                        ". Will not attempt to write to the Provenance Repository again until the repository has rolled over");
 
                 // Switch from readLock to writeLock so that we can perform rollover
                 readLock.unlock();
@@ -735,9 +727,9 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
     /**
      * Returns the size, in bytes, of the Repository storage
      *
-     * @param logFiles
-     * @param timeCutoff
-     * @return
+     * @param logFiles the log files to consider
+     * @param timeCutoff if a log file's last modified date is before timeCutoff, it will be skipped
+     * @return the total size of the given log files whose last modified date is at or after timeCutoff
      */
     public long getSize(final List<File> logFiles, final long timeCutoff) {
         long bytesUsed = 0L;
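
The completed javadoc now states the contract precisely: files whose last-modified time falls before timeCutoff are skipped, and the remaining files contribute their size. A minimal sketch of that contract, as an illustration only (the real method may account for more):

    import java.io.File;
    import java.util.List;

    public class LogSizeSketch {
        // Sum the sizes of the given log files, skipping any whose
        // last-modified time falls before the cutoff.
        public static long sizeSince(final List<File> logFiles, final long timeCutoff) {
            long bytesUsed = 0L;
            for (final File file : logFiles) {
                if (file.lastModified() >= timeCutoff) {
                    bytesUsed += file.length();
                }
            }
            return bytesUsed;
        }
    }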
@@ -760,7 +752,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
     /**
      * Purges old events from the repository
      *
-     * @throws IOException
+     * @throws IOException if unable to purge old events due to an I/O problem
      */
     void purgeOldEvents() throws IOException {
         while (!recoveryFinished.get()) {
@@ -858,12 +850,16 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
 
                 removed.add(baseName);
             } catch (final FileNotFoundException fnf) {
-                logger.warn("Failed to perform Expiration Action {} on Provenance Event file {} because the file no longer exists; will not perform additional Expiration Actions on this file", currentAction, file);
+                logger.warn("Failed to perform Expiration Action {} on Provenance Event file {} because the file no longer exists; will not "
+                        + "perform additional Expiration Actions on this file", currentAction, file);
                 removed.add(baseName);
             } catch (final Throwable t) {
-                logger.warn("Failed to perform Expiration Action {} on Provenance Event file {} due to {}; will not perform additional Expiration Actions on this file at this time", currentAction, file, t.toString());
+                logger.warn("Failed to perform Expiration Action {} on Provenance Event file {} due to {}; will not perform additional "
+                        + "Expiration Actions on this file at this time", currentAction, file, t.toString());
                 logger.warn("", t);
-                eventReporter.reportEvent(Severity.WARNING, EVENT_CATEGORY, "Failed to perform Expiration Action " + currentAction + " on Provenance Event file " + file + " due to " + t.toString() + "; will not perform additional Expiration Actions on this file at this time");
+                eventReporter.reportEvent(Severity.WARNING, EVENT_CATEGORY, "Failed to perform Expiration Action " + currentAction +
+                        " on Provenance Event file " + file + " due to " + t.toString() + "; will not perform additional Expiration Actions " +
+                        "on this file at this time");
             }
         }
 
@@ -906,24 +902,24 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
 
     // made protected for testing purposes
     protected int getJournalCount() {
-    	// determine how many 'journals' we have in the journals directories
+        // determine how many 'journals' we have in the journals directories
         int journalFileCount = 0;
         for ( final File storageDir : configuration.getStorageDirectories() ) {
-        	final File journalsDir = new File(storageDir, "journals");
-        	final File[] journalFiles = journalsDir.listFiles();
-        	if ( journalFiles != null ) {
-        		journalFileCount += journalFiles.length;
-        	}
+            final File journalsDir = new File(storageDir, "journals");
+            final File[] journalFiles = journalsDir.listFiles();
+            if ( journalFiles != null ) {
+                journalFileCount += journalFiles.length;
+            }
         }
-        
+
         return journalFileCount;
     }
-    
+
     /**
      * MUST be called with the write lock held
      *
-     * @param force
-     * @throws IOException
+     * @param force if true, will force a rollover regardless of whether or not data has been written
+     * @throws IOException if unable to complete rollover
      */
     private void rollover(final boolean force) throws IOException {
         if (!configuration.isAllowRollover()) {
@@ -938,44 +934,44 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                 final File writerFile = writer.getFile();
                 journalsToMerge.add(writerFile);
                 try {
-                	writer.close();
+                    writer.close();
                 } catch (final IOException ioe) {
-                	logger.warn("Failed to close {} due to {}", writer, ioe.toString());
-                	if ( logger.isDebugEnabled() ) {
-                		logger.warn("", ioe);
-                	}
+                    logger.warn("Failed to close {} due to {}", writer, ioe.toString());
+                    if ( logger.isDebugEnabled() ) {
+                        logger.warn("", ioe);
+                    }
                 }
             }
             if ( logger.isDebugEnabled() ) {
-            	logger.debug("Going to merge {} files for journals starting with ID {}", journalsToMerge.size(), LuceneUtil.substringBefore(journalsToMerge.get(0).getName(), "."));
+                logger.debug("Going to merge {} files for journals starting with ID {}", journalsToMerge.size(), LuceneUtil.substringBefore(journalsToMerge.get(0).getName(), "."));
             }
 
             int journalFileCount = getJournalCount();
             final int journalCountThreshold = configuration.getJournalCount() * 5;
             if ( journalFileCount > journalCountThreshold ) {
-            	logger.warn("The rate of the dataflow is exceeding the provenance recording rate. "
-            			+ "Slowing down flow to accomodate. Currently, there are {} journal files and "
-            			+ "threshold for blocking is {}", journalFileCount, journalCountThreshold);
-            	eventReporter.reportEvent(Severity.WARNING, "Provenance Repository", "The rate of the dataflow is "
-            			+ "exceeding the provenance recording rate. Slowing down flow to accomodate");
-            	
-            	while (journalFileCount > journalCountThreshold) {
-            		try {
-            			Thread.sleep(1000L);
-            		} catch (final InterruptedException ie) {
-            		}
-            		
-                	logger.debug("Provenance Repository is still behind. Keeping flow slowed down "
-                			+ "to accomodate. Currently, there are {} journal files and "
-                			+ "threshold for blocking is {}", journalFileCount, journalCountThreshold);
-
-            		journalFileCount = getJournalCount();
-            	}
-            	
-            	logger.info("Provenance Repository has no caught up with rolling over journal files. Current number of "
-            			+ "journal files to be rolled over is {}", journalFileCount);
-            }
-            
+                logger.warn("The rate of the dataflow is exceeding the provenance recording rate. "
+                        + "Slowing down flow to accommodate. Currently, there are {} journal files and "
+                        + "threshold for blocking is {}", journalFileCount, journalCountThreshold);
+                eventReporter.reportEvent(Severity.WARNING, "Provenance Repository", "The rate of the dataflow is "
+                        + "exceeding the provenance recording rate. Slowing down flow to accommodate");
+
+                while (journalFileCount > journalCountThreshold) {
+                    try {
+                        Thread.sleep(1000L);
+                    } catch (final InterruptedException ie) {
+                    }
+
+                    logger.debug("Provenance Repository is still behind. Keeping flow slowed down "
+                            + "to accommodate. Currently, there are {} journal files and "
+                            + "threshold for blocking is {}", journalFileCount, journalCountThreshold);
+
+                    journalFileCount = getJournalCount();
+                }
+
+                logger.info("Provenance Repository has now caught up with rolling over journal files. Current number of "
+                        + "journal files to be rolled over is {}", journalFileCount);
+            }
+
             writers = createWriters(configuration, idGenerator.get());
             streamStartTime.set(System.currentTimeMillis());
             recordsWrittenSinceRollover.getAndSet(0);
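
The warning messages above describe a simple backpressure scheme: when the number of on-disk journal files exceeds five times the configured journal count, the flow is held back and the count is re-checked once per second until it drains. A hedged sketch of that loop follows; the class and method names are illustrative, not NiFi API.

    import java.io.File;
    import java.util.List;

    public class JournalBackpressureSketch {
        // Block while the number of journal files exceeds the threshold
        // (five times the configured journal count), re-counting every second.
        public static void waitForJournalsToDrain(final List<File> storageDirs, final int configuredJournalCount)
                throws InterruptedException {
            final int threshold = configuredJournalCount * 5;
            while (countJournalFiles(storageDirs) > threshold) {
                Thread.sleep(1000L);
            }
        }

        private static int countJournalFiles(final List<File> storageDirs) {
            int count = 0;
            for (final File storageDir : storageDirs) {
                // each storage directory keeps its journals in a "journals" subdirectory
                final File[] journalFiles = new File(storageDir, "journals").listFiles();
                if (journalFiles != null) {
                    count += journalFiles.length;
                }
            }
            return count;
        }
    }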
@@ -989,24 +985,24 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             final Runnable rolloverRunnable = new Runnable() {
                 @Override
                 public void run() {
-                	try {
-	                    final File fileRolledOver;
-	
-	                    try {
-	                        fileRolledOver = mergeJournals(journalsToMerge, storageDir, getMergeFile(journalsToMerge, storageDir), eventReporter, latestRecords);
-	                        repoDirty.set(false);
-	                    } catch (final IOException ioe) {
-	                        repoDirty.set(true);
-	                        logger.error("Failed to merge Journal Files {} into a Provenance Log File due to {}", journalsToMerge, ioe.toString());
-	                        logger.error("", ioe);
-	                        return;
-	                    }
-	
-	                    if (fileRolledOver == null) {
-	                        return;
-	                    }
-	                    File file = fileRolledOver;
-	
+                    try {
+                        final File fileRolledOver;
+
+                        try {
+                            fileRolledOver = mergeJournals(journalsToMerge, storageDir, getMergeFile(journalsToMerge, storageDir), eventReporter, latestRecords);
+                            repoDirty.set(false);
+                        } catch (final IOException ioe) {
+                            repoDirty.set(true);
+                            logger.error("Failed to merge Journal Files {} into a Provenance Log File due to {}", journalsToMerge, ioe.toString());
+                            logger.error("", ioe);
+                            return;
+                        }
+
+                        if (fileRolledOver == null) {
+                            return;
+                        }
+                        File file = fileRolledOver;
+
                         // update our map of id to Path
                         // need lock to update the map, even though it's an AtomicReference, AtomicReference allows those doing a
                         // get() to obtain the most up-to-date version but we use a writeLock to prevent multiple threads modifying
@@ -1021,24 +1017,24 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                         } finally {
                             writeLock.unlock();
                         }
-	
-	                    logger.info("Successfully Rolled over Provenance Event file containing {} records", recordsWritten);
-	                    rolloverCompletions.getAndIncrement();
-	                    
-	                    // We have finished successfully. Cancel the future so that we don't run anymore
-	                    Future<?> future;
-	                    while ((future = futureReference.get()) == null) {
-	                    	try {
-	                    		Thread.sleep(10L);
-	                    	} catch (final InterruptedException ie) {
-	                    	}
-	                    }
-	                    
-	                    future.cancel(false);
-	                } catch (final Throwable t) {
-	                	logger.error("Failed to rollover Provenance repository due to {}", t.toString());
-	                	logger.error("", t);
-	                }
+
+                        logger.info("Successfully Rolled over Provenance Event file containing {} records", recordsWritten);
+                        rolloverCompletions.getAndIncrement();
+
+                        // We have finished successfully. Cancel the future so that we don't run anymore
+                        Future<?> future;
+                        while ((future = futureReference.get()) == null) {
+                            try {
+                                Thread.sleep(10L);
+                            } catch (final InterruptedException ie) {
+                            }
+                        }
+
+                        future.cancel(false);
+                    } catch (final Throwable t) {
+                        logger.error("Failed to rollover Provenance repository due to {}", t.toString());
+                        logger.error("", t);
+                    }
                 }
             };
 
@@ -1074,10 +1070,10 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             }
 
             for (final File journalFile : journalFiles) {
-            	if ( journalFile.isDirectory() ) {
-            		continue;
-            	}
-            	
+                if ( journalFile.isDirectory() ) {
+                    continue;
+                }
+
                 final String basename = LuceneUtil.substringBefore(journalFile.getName(), ".");
                 List<File> files = journalMap.get(basename);
                 if (files == null) {
@@ -1120,83 +1116,84 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
         return mergedFile;
     }
 
-    File mergeJournals(final List<File> journalFiles, final File storageDir, final File mergedFile, final EventReporter eventReporter, final RingBuffer<ProvenanceEventRecord> ringBuffer) throws IOException {
-    	logger.debug("Merging {} to {}", journalFiles, mergedFile);
-    	if ( this.closed ) {
-    		logger.info("Provenance Repository has been closed; will not merge journal files to {}", mergedFile);
-    		return null;
-    	}
-    	
+    File mergeJournals(final List<File> journalFiles, final File storageDir, final File mergedFile, final EventReporter eventReporter,
+            final RingBuffer<ProvenanceEventRecord> ringBuffer) throws IOException {
+        logger.debug("Merging {} to {}", journalFiles, mergedFile);
+        if ( this.closed ) {
+            logger.info("Provenance Repository has been closed; will not merge journal files to {}", mergedFile);
+            return null;
+        }
+
         if (journalFiles.isEmpty()) {
             return null;
         }
 
         Collections.sort(journalFiles, new Comparator<File>() {
-			@Override
-			public int compare(final File o1, final File o2) {
-				final String suffix1 = LuceneUtil.substringAfterLast(o1.getName(), ".");
-				final String suffix2 = LuceneUtil.substringAfterLast(o2.getName(), ".");
-
-				try {
-					final int journalIndex1 = Integer.parseInt(suffix1);
-					final int journalIndex2 = Integer.parseInt(suffix2);
-					return Integer.compare(journalIndex1, journalIndex2);
-				} catch (final NumberFormatException nfe) {
-					return o1.getName().compareTo(o2.getName());
-				}
-			}
+            @Override
+            public int compare(final File o1, final File o2) {
+                final String suffix1 = LuceneUtil.substringAfterLast(o1.getName(), ".");
+                final String suffix2 = LuceneUtil.substringAfterLast(o2.getName(), ".");
+
+                try {
+                    final int journalIndex1 = Integer.parseInt(suffix1);
+                    final int journalIndex2 = Integer.parseInt(suffix2);
+                    return Integer.compare(journalIndex1, journalIndex2);
+                } catch (final NumberFormatException nfe) {
+                    return o1.getName().compareTo(o2.getName());
+                }
+            }
         });
-        
+
         final String firstJournalFile = journalFiles.get(0).getName();
         final String firstFileSuffix = LuceneUtil.substringAfterLast(firstJournalFile, ".");
         final boolean allPartialFiles = firstFileSuffix.equals("0");
-        
+
         // check if we have all of the "partial" files for the journal.
         if (allPartialFiles) {
-        	if ( mergedFile.exists() ) {
-        		// we have all "partial" files and there is already a merged file. Delete the data from the index
-        		// because the merge file may not be fully merged. We will re-merge.
-        		logger.warn("Merged Journal File {} already exists; however, all partial journal files also exist "
-        				+ "so assuming that the merge did not finish. Repeating procedure in order to ensure consistency.");
-        		
-        		final DeleteIndexAction deleteAction = new DeleteIndexAction(this, indexConfig, indexManager);
-        		try {
-        			deleteAction.execute(mergedFile);
-        		} catch (final Exception e) {
-        			logger.warn("Failed to delete records from Journal File {} from the index; this could potentially result in duplicates. Failure was due to {}", mergedFile, e.toString());
-        			if ( logger.isDebugEnabled() ) {
-        				logger.warn("", e);
-        			}
-        		}
-
-        		// Since we only store the file's basename, block offset, and event ID, and because the newly created file could end up on
-        		// a different Storage Directory than the original, we need to ensure that we delete both the partially merged
-        		// file and the TOC file. Otherwise, we could get the wrong copy and have issues retrieving events.
-        		if ( !mergedFile.delete() ) {
-        			logger.error("Failed to delete partially written Provenance Journal File {}. This may result in events from this journal "
-        					+ "file not being able to be displayed. This file should be deleted manually.", mergedFile);
-        		}
-        		
-        		final File tocFile = TocUtil.getTocFile(mergedFile);
-        		if ( tocFile.exists() && !tocFile.delete() ) {
-        			logger.error("Failed to delete .toc file {}; this may result in not being able to read the Provenance Events from the {} Journal File. "
-        					+ "This can be corrected by manually deleting the {} file", tocFile, mergedFile, tocFile);
-        		}
-        	}
+            if ( mergedFile.exists() ) {
+                // we have all "partial" files and there is already a merged file. Delete the data from the index
+                // because the merge file may not be fully merged. We will re-merge.
+                logger.warn("Merged Journal File {} already exists; however, all partial journal files also exist "
+                        + "so assuming that the merge did not finish. Repeating procedure in order to ensure consistency.");
+
+                final DeleteIndexAction deleteAction = new DeleteIndexAction(this, indexConfig, indexManager);
+                try {
+                    deleteAction.execute(mergedFile);
+                } catch (final Exception e) {
+                    logger.warn("Failed to delete records from Journal File {} from the index; this could potentially result in duplicates. Failure was due to {}", mergedFile, e.toString());
+                    if ( logger.isDebugEnabled() ) {
+                        logger.warn("", e);
+                    }
+                }
+
+                // Since we only store the file's basename, block offset, and event ID, and because the newly created file could end up on
+                // a different Storage Directory than the original, we need to ensure that we delete both the partially merged
+                // file and the TOC file. Otherwise, we could get the wrong copy and have issues retrieving events.
+                if ( !mergedFile.delete() ) {
+                    logger.error("Failed to delete partially written Provenance Journal File {}. This may result in events from this journal "
+                            + "file not being able to be displayed. This file should be deleted manually.", mergedFile);
+                }
+
+                final File tocFile = TocUtil.getTocFile(mergedFile);
+                if ( tocFile.exists() && !tocFile.delete() ) {
+                    logger.error("Failed to delete .toc file {}; this may result in not being able to read the Provenance Events from the {} Journal File. "
+                            + "This can be corrected by manually deleting the {} file", tocFile, mergedFile, tocFile);
+                }
+            }
         } else {
-        	logger.warn("Cannot merge journal files {} because expected first file to end with extension '.0' "
-        			+ "but it did not; assuming that the files were already merged but only some finished deletion "
-        			+ "before restart. Deleting remaining partial journal files.", journalFiles);
-        	
-        	for ( final File file : journalFiles ) {
-        		if ( !file.delete() && file.exists() ) {
-        			logger.warn("Failed to delete unneeded journal file {}; this file should be cleaned up manually", file);
-        		}
-        	}
-        	
-        	return null;
-        }
-        
+            logger.warn("Cannot merge journal files {} because expected first file to end with extension '.0' "
+                    + "but it did not; assuming that the files were already merged but only some finished deletion "
+                    + "before restart. Deleting remaining partial journal files.", journalFiles);
+
+            for ( final File file : journalFiles ) {
+                if ( !file.delete() && file.exists() ) {
+                    logger.warn("Failed to delete unneeded journal file {}; this file should be cleaned up manually", file);
+                }
+            }
+
+            return null;
+        }
+
         final long startNanos = System.nanoTime();
 
         // Map each journal to a RecordReader
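
Before merging, the journals are ordered by the numeric index that follows the final '.' in the file name, falling back to a plain name comparison when the suffix is not a number. The same ordering, sketched without the LuceneUtil helper (the suffix() method below is an assumed stand-in for substringAfterLast):

    import java.io.File;
    import java.util.Arrays;
    import java.util.Comparator;

    public class JournalOrderingSketch {
        // Order partial journal files by the numeric index after the last '.',
        // e.g. a file ending in ".0" sorts before one ending in ".1".
        public static void sortBySuffix(final File[] journals) {
            Arrays.sort(journals, new Comparator<File>() {
                @Override
                public int compare(final File o1, final File o2) {
                    try {
                        return Integer.compare(Integer.parseInt(suffix(o1.getName())), Integer.parseInt(suffix(o2.getName())));
                    } catch (final NumberFormatException nfe) {
                        return o1.getName().compareTo(o2.getName());
                    }
                }
            });
        }

        private static String suffix(final String name) {
            final int idx = name.lastIndexOf('.');
            return idx < 0 ? name : name.substring(idx + 1);
        }
    }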
@@ -1241,12 +1238,14 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                     record = reader.nextRecord();
                 } catch (final EOFException eof) {
                 } catch (final Exception e) {
-                    logger.warn("Failed to generate Provenance Event Record from Journal due to " + e + "; it's possible that the record wasn't completely written to the file. This record will be skipped.");
+                    logger.warn("Failed to generate Provenance Event Record from Journal due to " + e + "; it's possible that the record wasn't "
+                            + "completely written to the file. This record will be skipped.");
                     if (logger.isDebugEnabled()) {
                         logger.warn("", e);
                     }
 
-                    eventReporter.reportEvent(Severity.WARNING, EVENT_CATEGORY, "Failed to read Provenance Event Record from Journal due to " + e + "; it's possible that hte record wasn't completely written to the file. This record will be skipped.");
+                    eventReporter.reportEvent(Severity.WARNING, EVENT_CATEGORY, "Failed to read Provenance Event Record from Journal due to " + e +
+                            "; it's possible that the record wasn't completely written to the file. This record will be skipped.");
                 }
 
                 if (record == null) {
@@ -1261,47 +1260,47 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             try (final RecordWriter writer = RecordWriters.newRecordWriter(writerFile, configuration.isCompressOnRollover(), true)) {
                 writer.writeHeader();
 
-                final IndexingAction indexingAction = new IndexingAction(this, indexConfig);
-                
+                final IndexingAction indexingAction = new IndexingAction(this);
+
                 final File indexingDirectory = indexConfig.getWritableIndexDirectory(writerFile);
                 final IndexWriter indexWriter = indexManager.borrowIndexWriter(indexingDirectory);
                 try {
-                	long maxId = 0L;
-                	
-	                while (!recordToReaderMap.isEmpty()) {
-	                    final Map.Entry<StandardProvenanceEventRecord, RecordReader> entry = recordToReaderMap.entrySet().iterator().next();
-	                    final StandardProvenanceEventRecord record = entry.getKey();
-	                    final RecordReader reader = entry.getValue();
-	
-	                    writer.writeRecord(record, record.getEventId());
-	                    final int blockIndex = writer.getTocWriter().getCurrentBlockIndex();
-	                    
-	                    indexingAction.index(record, indexWriter, blockIndex);
-	                    maxId = record.getEventId();
-	                    
-	                    ringBuffer.add(record);
-	                    records++;
-	
-	                    // Remove this entry from the map
-	                    recordToReaderMap.remove(record);
-	
-	                    // Get the next entry from this reader and add it to the map
-	                    StandardProvenanceEventRecord nextRecord = null;
-	
-	                    try {
-	                        nextRecord = reader.nextRecord();
-	                    } catch (final EOFException eof) {
-	                    }
-	
-	                    if (nextRecord != null) {
-	                        recordToReaderMap.put(nextRecord, reader);
-	                    }
-	                }
-	                
-	                indexWriter.commit();
-	                indexConfig.setMaxIdIndexed(maxId);
+                    long maxId = 0L;
+
+                    while (!recordToReaderMap.isEmpty()) {
+                        final Map.Entry<StandardProvenanceEventRecord, RecordReader> entry = recordToReaderMap.entrySet().iterator().next();
+                        final StandardProvenanceEventRecord record = entry.getKey();
+                        final RecordReader reader = entry.getValue();
+
+                        writer.writeRecord(record, record.getEventId());
+                        final int blockIndex = writer.getTocWriter().getCurrentBlockIndex();
+
+                        indexingAction.index(record, indexWriter, blockIndex);
+                        maxId = record.getEventId();
+
+                        ringBuffer.add(record);
+                        records++;
+
+                        // Remove this entry from the map
+                        recordToReaderMap.remove(record);
+
+                        // Get the next entry from this reader and add it to the map
+                        StandardProvenanceEventRecord nextRecord = null;
+
+                        try {
+                            nextRecord = reader.nextRecord();
+                        } catch (final EOFException eof) {
+                        }
+
+                        if (nextRecord != null) {
+                            recordToReaderMap.put(nextRecord, reader);
+                        }
+                    }
+
+                    indexWriter.commit();
+                    indexConfig.setMaxIdIndexed(maxId);
                 } finally {
-                	indexManager.returnIndexWriter(indexingDirectory, indexWriter);
+                    indexManager.returnIndexWriter(indexingDirectory, indexWriter);
                 }
             }
         } finally {
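
The loop above performs an N-way merge: each open reader contributes its next record, the smallest pending record is written and indexed, and that reader is then advanced. One common way to express the same idea, sketched here with a priority queue over event IDs and a made-up EventReader interface rather than the repository's record-to-reader map:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.PriorityQueue;

    public class KWayMergeSketch {
        // Minimal reader abstraction: returns the next event ID, or null at end of stream.
        interface EventReader {
            Long nextEventId();
        }

        // Merge several readers, each already sorted by event ID, into one globally
        // ordered list by always emitting the smallest pending ID first.
        public static List<Long> merge(final List<EventReader> readers) {
            // each queue entry is {eventId, readerIndex}
            final PriorityQueue<long[]> queue = new PriorityQueue<long[]>(readers.size() + 1, new Comparator<long[]>() {
                @Override
                public int compare(final long[] a, final long[] b) {
                    return Long.compare(a[0], b[0]);
                }
            });
            for (int i = 0; i < readers.size(); i++) {
                final Long first = readers.get(i).nextEventId();
                if (first != null) {
                    queue.add(new long[] {first, i});
                }
            }

            final List<Long> merged = new ArrayList<>();
            while (!queue.isEmpty()) {
                final long[] head = queue.poll();
                merged.add(head[0]);
                final Long next = readers.get((int) head[1]).nextEventId();
                if (next != null) {
                    queue.add(new long[] {next, (int) head[1]});
                }
            }
            return merged;
        }
    }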
@@ -1319,7 +1318,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                 logger.warn("Failed to remove temporary journal file {}; this file should be cleaned up manually", journalFile.getAbsolutePath());
                 eventReporter.reportEvent(Severity.WARNING, EVENT_CATEGORY, "Failed to remove temporary journal file " + journalFile.getAbsolutePath() + "; this file should be cleaned up manually");
             }
-            
+
             final File tocFile = TocUtil.getTocFile(journalFile);
             if (!tocFile.delete() && tocFile.exists()) {
                 logger.warn("Failed to remove temporary journal TOC file {}; this file should be cleaned up manually", tocFile.getAbsolutePath());
@@ -1374,7 +1373,8 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
     public QuerySubmission submitQuery(final Query query) {
         final int numQueries = querySubmissionMap.size();
         if (numQueries > MAX_UNDELETED_QUERY_RESULTS) {
-            throw new IllegalStateException("Cannot process query because there are currently " + numQueries + " queries whose results have not been deleted due to poorly behaving clients not issuing DELETE requests. Please try again later.");
+            throw new IllegalStateException("Cannot process query because there are currently " + numQueries + " queries whose results have not "
+                    + "been deleted due to poorly behaving clients not issuing DELETE requests. Please try again later.");
         }
 
         if (query.getEndDate() != null && query.getStartDate() != null && query.getStartDate().getTime() > query.getEndDate().getTime()) {
@@ -1416,7 +1416,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
         final AtomicInteger retrievalCount = new AtomicInteger(0);
         final List<File> indexDirectories = indexConfig.getIndexDirectories(
                 query.getStartDate() == null ? null : query.getStartDate().getTime(),
-                query.getEndDate() == null ? null : query.getEndDate().getTime());
+                        query.getEndDate() == null ? null : query.getEndDate().getTime());
         final AsyncQuerySubmission result = new AsyncQuerySubmission(query, indexDirectories.size());
         querySubmissionMap.put(query.getIdentifier(), result);
 
@@ -1432,11 +1432,11 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
     }
 
     /**
-     * REMOVE-ME: This is for testing only and can be removed.
+     * This is intended for testing and debugging only and is not otherwise used
      *
-     * @param luceneQuery
-     * @return
-     * @throws IOException
+     * @param luceneQuery the lucene query to execute
+     * @return an Iterator of ProvenanceEventRecord that match the query
+     * @throws IOException if unable to perform the query
      */
     public Iterator<ProvenanceEventRecord> queryLucene(final org.apache.lucene.search.Query luceneQuery) throws IOException {
         final List<File> indexFiles = indexConfig.getIndexDirectories();
@@ -1601,7 +1601,8 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
         return computeLineage(Collections.<String>singleton(flowFileUuid), LineageComputationType.FLOWFILE_LINEAGE, null, 0L, Long.MAX_VALUE);
     }
 
-    private Lineage computeLineage(final Collection<String> flowFileUuids, final LineageComputationType computationType, final Long eventId, final Long startTimestamp, final Long endTimestamp) throws IOException {
+    private Lineage computeLineage(final Collection<String> flowFileUuids, final LineageComputationType computationType, final Long eventId, final Long startTimestamp,
+            final Long endTimestamp) throws IOException {
         final AsyncLineageSubmission submission = submitLineageComputation(flowFileUuids, computationType, eventId, startTimestamp, endTimestamp);
         final StandardLineageResult result = submission.getResult();
         while (!result.isFinished()) {
@@ -1623,7 +1624,8 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
         return submitLineageComputation(Collections.singleton(flowFileUuid), LineageComputationType.FLOWFILE_LINEAGE, null, 0L, Long.MAX_VALUE);
     }
 
-    private AsyncLineageSubmission submitLineageComputation(final Collection<String> flowFileUuids, final LineageComputationType computationType, final Long eventId, final long startTimestamp, final long endTimestamp) {
+    private AsyncLineageSubmission submitLineageComputation(final Collection<String> flowFileUuids, final LineageComputationType computationType,
+            final Long eventId, final long startTimestamp, final long endTimestamp) {
         final List<File> indexDirs = indexConfig.getIndexDirectories(startTimestamp, endTimestamp);
         final AsyncLineageSubmission result = new AsyncLineageSubmission(computationType, eventId, flowFileUuids, indexDirs.size());
         lineageSubmissionMap.put(result.getLineageIdentifier(), result);
@@ -1647,16 +1649,16 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             }
 
             switch (event.getEventType()) {
-                case CLONE:
-                case FORK:
-                case JOIN:
-                case REPLAY:
-                    return submitLineageComputation(event.getChildUuids(), LineageComputationType.EXPAND_CHILDREN, eventId, event.getEventTime(), Long.MAX_VALUE);
-                default:
-                    final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_CHILDREN, eventId, Collections.<String>emptyList(), 1);
-                    lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
-                    submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its children cannot be expanded");
-                    return submission;
+            case CLONE:
+            case FORK:
+            case JOIN:
+            case REPLAY:
+                return submitLineageComputation(event.getChildUuids(), LineageComputationType.EXPAND_CHILDREN, eventId, event.getEventTime(), Long.MAX_VALUE);
+            default:
+                final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_CHILDREN, eventId, Collections.<String>emptyList(), 1);
+                lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
+                submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its children cannot be expanded");
+                return submission;
             }
         } catch (final IOException ioe) {
             final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_CHILDREN, eventId, Collections.<String>emptyList(), 1);
@@ -1684,17 +1686,17 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             }
 
             switch (event.getEventType()) {
-                case JOIN:
-                case FORK:
-                case CLONE:
-                case REPLAY:
-                    return submitLineageComputation(event.getParentUuids(), LineageComputationType.EXPAND_PARENTS, eventId, 0L, event.getEventTime());
-                default: {
-                    final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_PARENTS, eventId, Collections.<String>emptyList(), 1);
-                    lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
-                    submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its parents cannot be expanded");
-                    return submission;
-                }
+            case JOIN:
+            case FORK:
+            case CLONE:
+            case REPLAY:
+                return submitLineageComputation(event.getParentUuids(), LineageComputationType.EXPAND_PARENTS, eventId, 0L, event.getEventTime());
+            default: {
+                final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_PARENTS, eventId, Collections.<String>emptyList(), 1);
+                lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
+                submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its parents cannot be expanded");
+                return submission;
+            }
             }
         } catch (final IOException ioe) {
             final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_PARENTS, eventId, Collections.<String>emptyList(), 1);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/RepositoryConfiguration.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/RepositoryConfiguration.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/RepositoryConfiguration.java
index 3951591..d0d147c 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/RepositoryConfiguration.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/RepositoryConfiguration.java
@@ -34,7 +34,7 @@ public class RepositoryConfiguration {
     private long desiredIndexBytes = 1024L * 1024L * 500L; // 500 MB
     private int journalCount = 16;
     private int compressionBlockBytes = 1024 * 1024;
-    
+
     private List<SearchableField> searchableFields = new ArrayList<>();
     private List<SearchableField> searchableAttributes = new ArrayList<>();
     private boolean compress = true;
@@ -50,19 +50,19 @@ public class RepositoryConfiguration {
         return allowRollover;
     }
 
-    
+
     public int getCompressionBlockBytes() {
-		return compressionBlockBytes;
-	}
+        return compressionBlockBytes;
+    }
 
-	public void setCompressionBlockBytes(int compressionBlockBytes) {
-		this.compressionBlockBytes = compressionBlockBytes;
-	}
+    public void setCompressionBlockBytes(int compressionBlockBytes) {
+        this.compressionBlockBytes = compressionBlockBytes;
+    }
 
-	/**
+    /**
      * Specifies where the repository will store data
      *
-     * @return
+     * @return the directories where provenance files will be stored
      */
     public List<File> getStorageDirectories() {
         return Collections.unmodifiableList(storageDirectories);
@@ -71,18 +71,15 @@ public class RepositoryConfiguration {
     /**
      * Specifies where the repository should store data
      *
-     * @param storageDirectory
+     * @param storageDirectory the directory to store provenance files
      */
     public void addStorageDirectory(final File storageDirectory) {
         this.storageDirectories.add(storageDirectory);
     }
 
     /**
-     * Returns the minimum amount of time that a given record will stay in the
-     * repository
-     *
-     * @param timeUnit
-     * @return
+     * @param timeUnit the desired time unit
+     * @return the max amount of time that a given record will stay in the repository
      */
     public long getMaxRecordLife(final TimeUnit timeUnit) {
         return timeUnit.convert(recordLifeMillis, TimeUnit.MILLISECONDS);
@@ -91,8 +88,8 @@ public class RepositoryConfiguration {
     /**
      * Specifies how long a record should stay in the repository
      *
-     * @param maxRecordLife
-     * @param timeUnit
+     * @param maxRecordLife the max amount of time to keep a record in the repo
+     * @param timeUnit the period of time used by maxRecordLife
      */
     public void setMaxRecordLife(final long maxRecordLife, final TimeUnit timeUnit) {
         this.recordLifeMillis = TimeUnit.MILLISECONDS.convert(maxRecordLife, timeUnit);
@@ -101,7 +98,7 @@ public class RepositoryConfiguration {
     /**
      * Returns the maximum amount of data to store in the repository (in bytes)
      *
-     * @return
+     * @return the maximum amount of disk space to use for the prov repo
      */
     public long getMaxStorageCapacity() {
         return storageCapacity;
@@ -109,107 +106,91 @@ public class RepositoryConfiguration {
 
     /**
      * Sets the maximum amount of data to store in the repository (in bytes)
-     * @param maxStorageCapacity
+     *
+     * @param maxStorageCapacity the maximum amount of disk space to use for the prov repo
      */
     public void setMaxStorageCapacity(final long maxStorageCapacity) {
         this.storageCapacity = maxStorageCapacity;
     }
 
     /**
-     * Returns the maximum amount of time to write to a single event file
-     *
-     * @param timeUnit
-     * @return
+     * @param timeUnit the desired time unit for the returned value
+     * @return the maximum amount of time that the repo will write to a single event file
      */
     public long getMaxEventFileLife(final TimeUnit timeUnit) {
         return timeUnit.convert(eventFileMillis, TimeUnit.MILLISECONDS);
     }
 
     /**
-     * Sets the maximum amount of time to write to a single event file
-     *
-     * @param maxEventFileTime
-     * @param timeUnit
+     * @param maxEventFileTime the max amount of time to write to a single event file
+     * @param timeUnit the units for the value supplied by maxEventFileTime
      */
     public void setMaxEventFileLife(final long maxEventFileTime, final TimeUnit timeUnit) {
         this.eventFileMillis = TimeUnit.MILLISECONDS.convert(maxEventFileTime, timeUnit);
     }
 
     /**
-     * Returns the maximum number of bytes (pre-compression) that will be
+     * @return the maximum number of bytes (pre-compression) that will be
      * written to a single event file before the file is rolled over
-     *
-     * @return
      */
     public long getMaxEventFileCapacity() {
         return eventFileBytes;
     }
 
     /**
-     * Sets the maximum number of bytes (pre-compression) that will be written
+     * @param maxEventFileBytes the maximum number of bytes (pre-compression) that will be written
      * to a single event file before the file is rolled over
-     *
-     * @param maxEventFileBytes
      */
     public void setMaxEventFileCapacity(final long maxEventFileBytes) {
         this.eventFileBytes = maxEventFileBytes;
     }
 
     /**
-     * Returns the fields that can be indexed
-     *
-     * @return
+     * @return the fields that should be indexed
      */
     public List<SearchableField> getSearchableFields() {
         return Collections.unmodifiableList(searchableFields);
     }
 
     /**
-     * Sets the fields to index
-     *
-     * @param searchableFields
+     * @param searchableFields the fields to index
      */
     public void setSearchableFields(final List<SearchableField> searchableFields) {
         this.searchableFields = new ArrayList<>(searchableFields);
     }
 
     /**
-     * Returns the FlowFile attributes that can be indexed
-     *
-     * @return
+     * @return the FlowFile attributes that should be indexed
      */
     public List<SearchableField> getSearchableAttributes() {
         return Collections.unmodifiableList(searchableAttributes);
     }
 
     /**
-     * Sets the FlowFile attributes to index
-     *
-     * @param searchableAttributes
+     * @param searchableAttributes the FlowFile attributes to index
      */
     public void setSearchableAttributes(final List<SearchableField> searchableAttributes) {
         this.searchableAttributes = new ArrayList<>(searchableAttributes);
     }
 
     /**
-     * Indicates whether or not event files will be compressed when they are
+     * @return whether or not event files will be compressed when they are
      * rolled over
-     *
-     * @return
      */
     public boolean isCompressOnRollover() {
         return compress;
     }
 
     /**
-     * Specifies whether or not to compress event files on rollover
-     *
-     * @param compress
+     * @param compress if true, the data will be compressed when rolled over
      */
     public void setCompressOnRollover(final boolean compress) {
         this.compress = compress;
     }
 
+    /**
+     * @return the number of threads to use to query the repo
+     */
     public int getQueryThreadPoolSize() {
         return queryThreadPoolSize;
     }
@@ -246,27 +227,23 @@ public class RepositoryConfiguration {
      * </li>
      * </ol>
      *
-     * @param bytes
+     * @param bytes the number of bytes to write to an index before beginning a new shard
      */
     public void setDesiredIndexSize(final long bytes) {
         this.desiredIndexBytes = bytes;
     }
 
     /**
-     * Returns the desired size of each index shard. See the
-     * {@Link #setDesiredIndexSize} method for an explanation of why we choose
+     * @return the desired size of each index shard. See the
+     * {@link #setDesiredIndexSize} method for an explanation of why we choose
      * to shard the index.
-     *
-     * @return
      */
     public long getDesiredIndexSize() {
         return desiredIndexBytes;
     }
 
     /**
-     * Sets the number of Journal files to use when persisting records.
-     *
-     * @param numJournals
+     * @param numJournals the number of Journal files to use when persisting records.
      */
     public void setJournalCount(final int numJournals) {
         if (numJournals < 1) {
@@ -277,19 +254,14 @@ public class RepositoryConfiguration {
     }
 
     /**
-     * Returns the number of Journal files that will be used when persisting
-     * records.
-     *
-     * @return
+     * @return the number of Journal files that will be used when persisting records.
      */
     public int getJournalCount() {
         return journalCount;
     }
 
     /**
-     * Specifies whether or not the Repository should sync all updates to disk.
-     *
-     * @return
+     * @return <code>true</code> if the repository will perform an 'fsync' for all updates to disk
      */
     public boolean isAlwaysSync() {
         return alwaysSync;
@@ -301,7 +273,7 @@ public class RepositoryConfiguration {
      * persisted across restarts, even if there is a power failure or a sudden
      * Operating System crash, but it can be very expensive.
      *
-     * @param alwaysSync
+     * @param alwaysSync whether or not to perform an 'fsync' for all updates to disk
      */
     public void setAlwaysSync(boolean alwaysSync) {
         this.alwaysSync = alwaysSync;
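
Taken together, the accessors documented above are the main tuning knobs for the persistent provenance repository. A minimal sketch of wiring them up, assuming the no-arg constructor that the setter-based style implies; every value and the wrapper class name below are chosen purely for illustration:

    import java.util.concurrent.TimeUnit;

    import org.apache.nifi.provenance.RepositoryConfiguration;

    public class ProvenanceRepoConfigExample {
        public static RepositoryConfiguration buildConfig() {
            final RepositoryConfiguration config = new RepositoryConfiguration();
            config.setMaxStorageCapacity(1024L * 1024L * 1024L);   // cap the repo at roughly 1 GB of disk
            config.setMaxEventFileLife(30, TimeUnit.SECONDS);      // roll event files at least every 30 seconds
            config.setMaxEventFileCapacity(10L * 1024L * 1024L);   // ...or once ~10 MB (pre-compression) is written
            config.setCompressOnRollover(true);                    // gzip event files as they roll over
            config.setJournalCount(16);                            // spread writes across 16 journal files
            config.setAlwaysSync(false);                           // skip a per-update fsync unless durability demands it
            return config;
        }
    }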

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java
index 9bbf195..ca0d5ed 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordReader.java
@@ -39,40 +39,40 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class StandardRecordReader implements RecordReader {
-	private static final Logger logger = LoggerFactory.getLogger(StandardRecordReader.class);
-	
-	private final ByteCountingInputStream rawInputStream;
+    private static final Logger logger = LoggerFactory.getLogger(StandardRecordReader.class);
+
+    private final ByteCountingInputStream rawInputStream;
     private final String filename;
     private final int serializationVersion;
     private final boolean compressed;
     private final TocReader tocReader;
     private final int headerLength;
-    
+
     private DataInputStream dis;
     private ByteCountingInputStream byteCountingIn;
 
     public StandardRecordReader(final InputStream in, final String filename) throws IOException {
-    	this(in, filename, null);
+        this(in, filename, null);
     }
-    
+
     public StandardRecordReader(final InputStream in, final String filename, final TocReader tocReader) throws IOException {
-    	logger.trace("Creating RecordReader for {}", filename);
-    	
-    	rawInputStream = new ByteCountingInputStream(in);
+        logger.trace("Creating RecordReader for {}", filename);
+
+        rawInputStream = new ByteCountingInputStream(in);
 
         final InputStream limitedStream;
         if ( tocReader == null ) {
-        	limitedStream = rawInputStream;
+            limitedStream = rawInputStream;
         } else {
-        	final long offset1 = tocReader.getBlockOffset(1);
-        	if ( offset1 < 0 ) {
-        		limitedStream = rawInputStream;
-        	} else {
-        		limitedStream = new LimitingInputStream(rawInputStream, offset1 - rawInputStream.getBytesConsumed());
-        	}
-        }
-        
-    	final InputStream readableStream;
+            final long offset1 = tocReader.getBlockOffset(1);
+            if ( offset1 < 0 ) {
+                limitedStream = rawInputStream;
+            } else {
+                limitedStream = new LimitingInputStream(rawInputStream, offset1 - rawInputStream.getBytesConsumed());
+            }
+        }
+
+        final InputStream readableStream;
         if (filename.endsWith(".gz")) {
             readableStream = new BufferedInputStream(new GZIPInputStream(limitedStream));
             compressed = true;
@@ -83,11 +83,11 @@ public class StandardRecordReader implements RecordReader {
 
         byteCountingIn = new ByteCountingInputStream(readableStream);
         dis = new DataInputStream(byteCountingIn);
-        
+
         final String repoClassName = dis.readUTF();
         final int serializationVersion = dis.readInt();
-        headerLength = repoClassName.getBytes(StandardCharsets.UTF_8).length + 2 + 4;	// 2 bytes for string length, 4 for integer.
-        
+        headerLength = repoClassName.getBytes(StandardCharsets.UTF_8).length + 2 + 4; // 2 bytes for string length, 4 for integer.
+
         if (serializationVersion < 1 || serializationVersion > 8) {
             throw new IllegalArgumentException("Unable to deserialize record because the version is " + serializationVersion + " and supported versions are 1-8");
         }
@@ -99,52 +99,52 @@ public class StandardRecordReader implements RecordReader {
 
     @Override
     public void skipToBlock(final int blockIndex) throws IOException {
-    	if ( tocReader == null ) {
-    		throw new IllegalStateException("Cannot skip to block " + blockIndex + " for Provenance Log " + filename + " because no Table-of-Contents file was found for this Log");
-    	}
-    	
-    	if ( blockIndex < 0 ) {
-    		throw new IllegalArgumentException("Cannot skip to block " + blockIndex + " because the value is negative");
-    	}
-    	
-    	if ( blockIndex == getBlockIndex() ) {
-    		return;
-    	}
-    	
-    	final long offset = tocReader.getBlockOffset(blockIndex);
-    	if ( offset < 0 ) {
-    		throw new IOException("Unable to find block " + blockIndex + " in Provenance Log " + filename);
-    	}
-    	
-    	final long curOffset = rawInputStream.getBytesConsumed();
-    	
-    	final long bytesToSkip = offset - curOffset;
-    	if ( bytesToSkip >= 0 ) {
-	    	try {
-	    		StreamUtils.skip(rawInputStream, bytesToSkip);
-	    		logger.debug("Skipped stream from offset {} to {} ({} bytes skipped)", curOffset, offset, bytesToSkip);
-	    	} catch (final IOException e) {
-	    		throw new IOException("Failed to skip to offset " + offset + " for block " + blockIndex + " of Provenance Log " + filename, e);
-	    	}
-	
-	    	resetStreamForNextBlock();
-    	}
+        if ( tocReader == null ) {
+            throw new IllegalStateException("Cannot skip to block " + blockIndex + " for Provenance Log " + filename + " because no Table-of-Contents file was found for this Log");
+        }
+
+        if ( blockIndex < 0 ) {
+            throw new IllegalArgumentException("Cannot skip to block " + blockIndex + " because the value is negative");
+        }
+
+        if ( blockIndex == getBlockIndex() ) {
+            return;
+        }
+
+        final long offset = tocReader.getBlockOffset(blockIndex);
+        if ( offset < 0 ) {
+            throw new IOException("Unable to find block " + blockIndex + " in Provenance Log " + filename);
+        }
+
+        final long curOffset = rawInputStream.getBytesConsumed();
+
+        final long bytesToSkip = offset - curOffset;
+        if ( bytesToSkip >= 0 ) {
+            try {
+                StreamUtils.skip(rawInputStream, bytesToSkip);
+                logger.debug("Skipped stream from offset {} to {} ({} bytes skipped)", curOffset, offset, bytesToSkip);
+            } catch (final IOException e) {
+                throw new IOException("Failed to skip to offset " + offset + " for block " + blockIndex + " of Provenance Log " + filename, e);
+            }
+
+            resetStreamForNextBlock();
+        }
     }
-    
+
     private void resetStreamForNextBlock() throws IOException {
-    	final InputStream limitedStream;
+        final InputStream limitedStream;
         if ( tocReader == null ) {
-        	limitedStream = rawInputStream;
+            limitedStream = rawInputStream;
         } else {
-        	final long offset = tocReader.getBlockOffset(1 + getBlockIndex());
-        	if ( offset < 0 ) {
-        		limitedStream = rawInputStream;
-        	} else {
-        		limitedStream = new LimitingInputStream(rawInputStream, offset - rawInputStream.getBytesConsumed());
-        	}
-        }
-    	
-    	final InputStream readableStream;
+            final long offset = tocReader.getBlockOffset(1 + getBlockIndex());
+            if ( offset < 0 ) {
+                limitedStream = rawInputStream;
+            } else {
+                limitedStream = new LimitingInputStream(rawInputStream, offset - rawInputStream.getBytesConsumed());
+            }
+        }
+
+        final InputStream readableStream;
         if (compressed) {
             readableStream = new BufferedInputStream(new GZIPInputStream(limitedStream));
         } else {
@@ -154,32 +154,32 @@ public class StandardRecordReader implements RecordReader {
         byteCountingIn = new ByteCountingInputStream(readableStream, rawInputStream.getBytesConsumed());
         dis = new DataInputStream(byteCountingIn);
     }
-    
-    
+
+
     @Override
     public TocReader getTocReader() {
-    	return tocReader;
+        return tocReader;
     }
-    
+
     @Override
     public boolean isBlockIndexAvailable() {
-    	return tocReader != null;
+        return tocReader != null;
     }
-    
+
     @Override
     public int getBlockIndex() {
-    	if ( tocReader == null ) {
-    		throw new IllegalStateException("Cannot determine Block Index because no Table-of-Contents could be found for Provenance Log " + filename);
-    	}
-    	
-    	return tocReader.getBlockIndex(rawInputStream.getBytesConsumed());
+        if ( tocReader == null ) {
+            throw new IllegalStateException("Cannot determine Block Index because no Table-of-Contents could be found for Provenance Log " + filename);
+        }
+
+        return tocReader.getBlockIndex(rawInputStream.getBytesConsumed());
     }
-    
+
     @Override
     public long getBytesConsumed() {
-    	return byteCountingIn.getBytesConsumed();
+        return byteCountingIn.getBytesConsumed();
     }
-    
+
     private StandardProvenanceEventRecord readPreVersion6Record() throws IOException {
         final long startOffset = byteCountingIn.getBytesConsumed();
 
@@ -374,17 +374,17 @@ public class StandardRecordReader implements RecordReader {
     }
 
     private String readUUID(final DataInputStream in) throws IOException {
-    	if ( serializationVersion < 8 ) {
-	        final long msb = in.readLong();
-	        final long lsb = in.readLong();
-	        return new UUID(msb, lsb).toString();
-    	} else {
-    		// before version 8, we serialized UUID's as two longs in order to
-    		// write less data. However, in version 8 we changed to just writing
-    		// out the string because it's extremely expensive to call UUID.fromString.
-    		// In the end, since we generally compress, the savings in minimal anyway.
-    		return in.readUTF();
-    	}
+        if ( serializationVersion < 8 ) {
+            final long msb = in.readLong();
+            final long lsb = in.readLong();
+            return new UUID(msb, lsb).toString();
+        } else {
+            // before version 8, we serialized UUID's as two longs in order to
+            // write less data. However, in version 8 we changed to just writing
+            // out the string because it's extremely expensive to call UUID.fromString.
+            // In the end, since we generally compress, the savings is minimal anyway.
+            return in.readUTF();
+        }
     }
 
     private String readNullableString(final DataInputStream in) throws IOException {
@@ -416,53 +416,53 @@ public class StandardRecordReader implements RecordReader {
         byteCountingIn.mark(1);
         int nextByte = byteCountingIn.read();
         byteCountingIn.reset();
-        
+
         if ( nextByte < 0 ) {
-        	try {
-        		resetStreamForNextBlock();
-        	} catch (final EOFException eof) {
-        		return false;
-        	}
-        	
+            try {
+                resetStreamForNextBlock();
+            } catch (final EOFException eof) {
+                return false;
+            }
+
             byteCountingIn.mark(1);
             nextByte = byteCountingIn.read();
             byteCountingIn.reset();
         }
-        
+
         return (nextByte >= 0);
     }
-    
+
     @Override
     public long getMaxEventId() throws IOException {
-    	if ( tocReader != null ) {
-    		final long lastBlockOffset = tocReader.getLastBlockOffset();
-    		skipToBlock(tocReader.getBlockIndex(lastBlockOffset));
-    	}
-    	
-    	ProvenanceEventRecord record;
-    	ProvenanceEventRecord lastRecord = null;
-    	try {
-	    	while ((record = nextRecord()) != null) {
-	    		lastRecord = record;
-	    	}
-    	} catch (final EOFException eof) {
-    		// This can happen if we stop NIFi while the record is being written.
-    		// This is OK, we just ignore this record. The session will not have been
-    		// committed, so we can just process the FlowFile again.
-    	}
-    	
-    	return (lastRecord == null) ? -1L : lastRecord.getEventId();
+        if ( tocReader != null ) {
+            final long lastBlockOffset = tocReader.getLastBlockOffset();
+            skipToBlock(tocReader.getBlockIndex(lastBlockOffset));
+        }
+
+        ProvenanceEventRecord record;
+        ProvenanceEventRecord lastRecord = null;
+        try {
+            while ((record = nextRecord()) != null) {
+                lastRecord = record;
+            }
+        } catch (final EOFException eof) {
+            // This can happen if we stop NiFi while the record is being written.
+            // This is OK, we just ignore this record. The session will not have been
+            // committed, so we can just process the FlowFile again.
+        }
+
+        return (lastRecord == null) ? -1L : lastRecord.getEventId();
     }
 
     @Override
     public void close() throws IOException {
-    	logger.trace("Closing Record Reader for {}", filename);
-    	
+        logger.trace("Closing Record Reader for {}", filename);
+
         dis.close();
         rawInputStream.close();
-        
+
         if ( tocReader != null ) {
-        	tocReader.close();
+            tocReader.close();
         }
     }
 
@@ -473,9 +473,9 @@ public class StandardRecordReader implements RecordReader {
 
     @Override
     public void skipTo(final long position) throws IOException {
-    	// we are subtracting headerLength from the number of bytes consumed because we used to 
-    	// consider the offset of the first record "0" - now we consider it whatever position it
-    	// it really is in the stream.
+        // we are subtracting headerLength from the number of bytes consumed because we used to
+        // consider the offset of the first record "0" - now we consider it whatever position
+        // it really is in the stream.
         final long currentPosition = byteCountingIn.getBytesConsumed() - headerLength;
         if (currentPosition == position) {
             return;
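
For orientation, a rough usage sketch built from the signatures visible in this diff: the three-argument constructor, nextRecord(), and close(). The file name and the surrounding I/O handling are illustrative (the fragment assumes it lives in a method that declares IOException), and passing null for the TocReader just means sequential reading, exactly what the two-argument constructor does:

    final File eventFile = new File("provenance_repository/12345.prov.gz");   // illustrative path
    try (final InputStream in = new FileInputStream(eventFile)) {
        final StandardRecordReader reader = new StandardRecordReader(in, eventFile.getName(), null);
        try {
            ProvenanceEventRecord record;
            while ((record = reader.nextRecord()) != null) {
                // same accessors the repository code itself uses on returned events
                System.out.println(record.getEventId() + " " + record.getEventType());
            }
        } finally {
            reader.close();
        }
    }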

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java
index dbb2c48..3095f13 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/StandardRecordWriter.java
@@ -36,15 +36,15 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class StandardRecordWriter implements RecordWriter {
-	private static final Logger logger = LoggerFactory.getLogger(StandardRecordWriter.class);
-	
+    private static final Logger logger = LoggerFactory.getLogger(StandardRecordWriter.class);
+
     private final File file;
     private final FileOutputStream fos;
     private final ByteCountingOutputStream rawOutStream;
     private final TocWriter tocWriter;
     private final boolean compressed;
     private final int uncompressedBlockSize;
-    
+
     private DataOutputStream out;
     private ByteCountingOutputStream byteCountingOut;
     private long lastBlockOffset = 0L;
@@ -52,21 +52,21 @@ public class StandardRecordWriter implements RecordWriter {
 
     private final Lock lock = new ReentrantLock();
 
-    
+
     public StandardRecordWriter(final File file, final TocWriter writer, final boolean compressed, final int uncompressedBlockSize) throws IOException {
-    	logger.trace("Creating Record Writer for {}", file.getName());
-    	
+        logger.trace("Creating Record Writer for {}", file.getName());
+
         this.file = file;
         this.compressed = compressed;
         this.fos = new FileOutputStream(file);
         rawOutStream = new ByteCountingOutputStream(fos);
         this.uncompressedBlockSize = uncompressedBlockSize;
-        
+
         this.tocWriter = writer;
     }
 
     static void writeUUID(final DataOutputStream out, final String uuid) throws IOException {
-    	out.writeUTF(uuid);
+        out.writeUTF(uuid);
     }
 
     static void writeUUIDs(final DataOutputStream out, final Collection<String> list) throws IOException {
@@ -85,49 +85,49 @@ public class StandardRecordWriter implements RecordWriter {
         return file;
     }
 
-	@Override
+    @Override
     public synchronized void writeHeader() throws IOException {
         lastBlockOffset = rawOutStream.getBytesWritten();
         resetWriteStream();
-        
+
         out.writeUTF(PersistentProvenanceRepository.class.getName());
         out.writeInt(PersistentProvenanceRepository.SERIALIZATION_VERSION);
         out.flush();
     }
-    
+
     private void resetWriteStream() throws IOException {
-    	if ( out != null ) {
-    		out.flush();
-    	}
-
-    	final long byteOffset = (byteCountingOut == null) ? rawOutStream.getBytesWritten() : byteCountingOut.getBytesWritten();
-    	
-    	final OutputStream writableStream;
-    	if ( compressed ) {
-    		// because of the way that GZIPOutputStream works, we need to call close() on it in order for it
-    		// to write its trailing bytes. But we don't want to close the underlying OutputStream, so we wrap
-    		// the underlying OutputStream in a NonCloseableOutputStream
-    		if ( out != null ) {
-    			out.close();
-    		}
-
-        	if ( tocWriter != null ) {
-        		tocWriter.addBlockOffset(rawOutStream.getBytesWritten());
-        	}
-
-    		writableStream = new BufferedOutputStream(new GZIPOutputStream(new NonCloseableOutputStream(rawOutStream), 1), 65536);
-    	} else {
-        	if ( tocWriter != null ) {
-        		tocWriter.addBlockOffset(rawOutStream.getBytesWritten());
-        	}
-
-    		writableStream = new BufferedOutputStream(rawOutStream, 65536);
-    	}
-    	
+        if ( out != null ) {
+            out.flush();
+        }
+
+        final long byteOffset = (byteCountingOut == null) ? rawOutStream.getBytesWritten() : byteCountingOut.getBytesWritten();
+
+        final OutputStream writableStream;
+        if ( compressed ) {
+            // because of the way that GZIPOutputStream works, we need to call close() on it in order for it
+            // to write its trailing bytes. But we don't want to close the underlying OutputStream, so we wrap
+            // the underlying OutputStream in a NonCloseableOutputStream
+            if ( out != null ) {
+                out.close();
+            }
+
+            if ( tocWriter != null ) {
+                tocWriter.addBlockOffset(rawOutStream.getBytesWritten());
+            }
+
+            writableStream = new BufferedOutputStream(new GZIPOutputStream(new NonCloseableOutputStream(rawOutStream), 1), 65536);
+        } else {
+            if ( tocWriter != null ) {
+                tocWriter.addBlockOffset(rawOutStream.getBytesWritten());
+            }
+
+            writableStream = new BufferedOutputStream(rawOutStream, 65536);
+        }
+
         this.byteCountingOut = new ByteCountingOutputStream(writableStream, byteOffset);
         this.out = new DataOutputStream(byteCountingOut);
     }
-    
+
 
     @Override
     public synchronized long writeRecord(final ProvenanceEventRecord record, long recordIdentifier) throws IOException {
@@ -136,16 +136,16 @@ public class StandardRecordWriter implements RecordWriter {
 
         // add a new block to the TOC if needed.
         if ( tocWriter != null && (startBytes - lastBlockOffset >= uncompressedBlockSize) ) {
-        	lastBlockOffset = startBytes;
-        	
-        	if ( compressed ) {
-        		// because of the way that GZIPOutputStream works, we need to call close() on it in order for it
-        		// to write its trailing bytes. But we don't want to close the underlying OutputStream, so we wrap
-        		// the underlying OutputStream in a NonCloseableOutputStream
-        		resetWriteStream();
-        	}
+            lastBlockOffset = startBytes;
+
+            if ( compressed ) {
+                // because of the way that GZIPOutputStream works, we need to call close() on it in order for it
+                // to write its trailing bytes. But we don't want to close the underlying OutputStream, so we wrap
+                // the underlying OutputStream in a NonCloseableOutputStream
+                resetWriteStream();
+            }
         }
-        
+
         out.writeLong(recordIdentifier);
         out.writeUTF(record.getEventType().name());
         out.writeLong(record.getEventTime());
@@ -175,7 +175,7 @@ public class StandardRecordWriter implements RecordWriter {
             writeLongNullableString(out, entry.getValue());
         }
 
-        // If Content Claim Info is present, write out a 'TRUE' followed by claim info. Else, write out 'false'. 
+        // If Content Claim Info is present, write out a 'TRUE' followed by claim info. Else, write out 'false'.
         if (record.getContentClaimSection() != null && record.getContentClaimContainer() != null && record.getContentClaimIdentifier() != null) {
             out.writeBoolean(true);
             out.writeUTF(record.getContentClaimContainer());
@@ -261,24 +261,24 @@ public class StandardRecordWriter implements RecordWriter {
 
     @Override
     public synchronized void close() throws IOException {
-    	logger.trace("Closing Record Writer for {}", file.getName());
-    	
+        logger.trace("Closing Record Writer for {}", file.getName());
+
         lock();
         try {
-        	try {
-        		out.flush();
-        		out.close();
-        	} finally {
-        		rawOutStream.close();
-            
-	            if ( tocWriter != null ) {
-	            	tocWriter.close();
-	            }
-        	}
+            try {
+                out.flush();
+                out.close();
+            } finally {
+                rawOutStream.close();
+
+                if ( tocWriter != null ) {
+                    tocWriter.close();
+                }
+            }
         } finally {
             unlock();
         }
-        
+
     }
 
     @Override
@@ -308,14 +308,14 @@ public class StandardRecordWriter implements RecordWriter {
 
     @Override
     public void sync() throws IOException {
-    	if ( tocWriter != null ) {
-    		tocWriter.sync();
-    	}
-    	fos.getFD().sync();
+        if ( tocWriter != null ) {
+            tocWriter.sync();
+        }
+        fos.getFD().sync();
     }
-    
+
     @Override
     public TocWriter getTocWriter() {
-    	return tocWriter;
+        return tocWriter;
     }
 }
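
The writing side, sketched from the same diff: writeHeader() has to run first (it is what sets up the underlying DataOutputStream via resetWriteStream()), writeRecord() takes the event plus its record identifier, and sync() forces the file descriptor (and the TOC, when one is configured) to disk. The file name, the block size, and the pre-existing 'event'/'eventId' variables are assumptions for illustration, and a null TocWriter simply disables the table of contents, matching the null checks in the class:

    final File eventFile = new File("provenance_repository/12346.prov.gz");        // illustrative path
    final StandardRecordWriter writer =
            new StandardRecordWriter(eventFile, null, true, 1024 * 1024);          // compressed, ~1 MB uncompressed blocks
    try {
        writer.writeHeader();                  // writes the repository class name and serialization version
        writer.writeRecord(event, eventId);    // 'event' is an existing ProvenanceEventRecord, 'eventId' its assigned id
        writer.sync();                         // tocWriter.sync() if present, then fos.getFD().sync()
    } finally {
        writer.close();
    }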


[39/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/e1160f59
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/e1160f59
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/e1160f59

Branch: refs/heads/NIFI-292
Commit: e1160f5932b59eedf738eca18b8031a58385ca16
Parents: a52cf52
Author: joewitt <jo...@apache.org>
Authored: Tue Apr 28 00:27:12 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Tue Apr 28 00:27:12 2015 -0400

----------------------------------------------------------------------
 nifi-parent/pom.xml                             |  33 ++--
 .../apache/nifi/audit/ControllerAuditor.java    |  38 ++--
 .../nifi/audit/ControllerServiceAuditor.java    |  93 +++++-----
 .../org/apache/nifi/audit/FunnelAuditor.java    |  33 ++--
 .../java/org/apache/nifi/audit/NiFiAuditor.java |   8 +-
 .../java/org/apache/nifi/audit/PortAuditor.java |  45 +++--
 .../apache/nifi/audit/ProcessGroupAuditor.java  |  42 ++---
 .../org/apache/nifi/audit/ProcessorAuditor.java |  66 +++----
 .../apache/nifi/audit/RelationshipAuditor.java  |  72 ++++----
 .../nifi/audit/RemoteProcessGroupAuditor.java   |  56 +++---
 .../apache/nifi/audit/ReportingTaskAuditor.java |  64 ++++---
 .../org/apache/nifi/audit/SnippetAuditor.java   |  40 ++---
 .../IllegalClusterResourceRequestException.java |   3 +-
 .../nifi/web/StandardNiFiServiceFacade.java     |  36 ++--
 .../StandardNiFiWebConfigurationContext.java    |  35 ++--
 .../apache/nifi/web/StandardNiFiWebContext.java |   8 +-
 .../nifi/web/api/ApplicationResource.java       |  34 ++--
 .../nifi/web/api/BulletinBoardResource.java     |   7 +-
 .../apache/nifi/web/api/ClusterResource.java    |   6 +-
 .../apache/nifi/web/api/ConnectionResource.java |  80 +++------
 .../apache/nifi/web/api/ControllerResource.java | 113 ++++--------
 .../nifi/web/api/ControllerServiceResource.java | 132 ++++++--------
 .../org/apache/nifi/web/api/FunnelResource.java |  43 ++---
 .../apache/nifi/web/api/HistoryResource.java    |  52 ++----
 .../apache/nifi/web/api/InputPortResource.java  |  48 ++----
 .../org/apache/nifi/web/api/LabelResource.java  |  46 ++---
 .../org/apache/nifi/web/api/NodeResource.java   |  20 +--
 .../apache/nifi/web/api/OutputPortResource.java |  48 ++----
 .../nifi/web/api/ProcessGroupResource.java      | 155 ++++++-----------
 .../apache/nifi/web/api/ProcessorResource.java  |  69 +++-----
 .../apache/nifi/web/api/ProvenanceResource.java | 129 +++++---------
 .../web/api/RemoteProcessGroupResource.java     |  99 ++++-------
 .../nifi/web/api/ReportingTaskResource.java     |  92 +++-------
 .../apache/nifi/web/api/SnippetResource.java    |  79 ++++-----
 .../nifi/web/api/SystemDiagnosticsResource.java |   4 +-
 .../apache/nifi/web/api/TemplateResource.java   |  38 ++--
 .../apache/nifi/web/api/UserGroupResource.java  |  44 ++---
 .../org/apache/nifi/web/api/UserResource.java   |  33 ++--
 .../config/AdministrationExceptionMapper.java   |   1 -
 ...ationCredentialsNotFoundExceptionMapper.java |   3 +-
 .../web/api/config/ClusterExceptionMapper.java  |   1 -
 .../NoResponseFromNodesExceptionMapper.java     |   3 +-
 .../nifi/web/api/config/ThrowableMapper.java    |   1 -
 .../org/apache/nifi/web/api/dto/DtoFactory.java | 172 +++++++++----------
 .../org/apache/nifi/web/api/package-info.java   |  41 ++---
 .../request/BulletinBoardPatternParameter.java  |   3 +-
 .../nifi/web/api/request/ClientIdParameter.java |   3 +-
 .../nifi/web/api/request/DateTimeParameter.java |   3 +-
 .../nifi/web/api/request/DoubleParameter.java   |   3 +-
 .../nifi/web/api/request/IntegerParameter.java  |   3 +-
 .../nifi/web/api/request/LongParameter.java     |   3 +-
 .../ApplicationStartupContextListener.java      |   8 +-
 .../nifi/web/controller/ControllerFacade.java   | 162 ++++++++---------
 .../nifi/web/dao/ControllerServiceDAO.java      |   3 +-
 .../java/org/apache/nifi/web/dao/PortDAO.java   |   2 +-
 .../apache/nifi/web/dao/ProcessGroupDAO.java    |  11 +-
 .../org/apache/nifi/web/dao/ProcessorDAO.java   |   3 +-
 .../nifi/web/dao/RemoteProcessGroupDAO.java     |   4 +-
 .../apache/nifi/web/dao/ReportingTaskDAO.java   |  11 +-
 .../org/apache/nifi/web/dao/SnippetDAO.java     |   3 +-
 .../org/apache/nifi/web/dao/TemplateDAO.java    |  17 +-
 .../apache/nifi/web/dao/impl/ComponentDAO.java  |  21 +--
 .../web/dao/impl/StandardConnectionDAO.java     |  48 ------
 .../dao/impl/StandardControllerServiceDAO.java  |  67 +-------
 .../nifi/web/dao/impl/StandardFunnelDAO.java    |  48 ------
 .../nifi/web/dao/impl/StandardInputPortDAO.java |  41 -----
 .../nifi/web/dao/impl/StandardLabelDAO.java     |  48 ------
 .../web/dao/impl/StandardOutputPortDAO.java     |  41 -----
 .../web/dao/impl/StandardProcessGroupDAO.java   |  34 ----
 .../nifi/web/dao/impl/StandardProcessorDAO.java |  49 ------
 .../dao/impl/StandardRemoteProcessGroupDAO.java |  29 +---
 .../web/dao/impl/StandardReportingTaskDAO.java  |  63 +------
 .../nifi/web/dao/impl/StandardSnippetDAO.java   |  43 -----
 .../nifi/web/filter/NodeRequestFilter.java      |  21 +--
 .../org/apache/nifi/web/util/Availability.java  |   3 +-
 .../org/apache/nifi/web/util/SnippetUtils.java  |  14 +-
 .../apache/nifi/integration/NiFiWebApiTest.java |  27 ---
 .../accesscontrol/AdminAccessControlTest.java   | 162 +++++------------
 .../accesscontrol/DfmAccessControlTest.java     | 140 +++------------
 .../ReadOnlyAccessControlTest.java              | 162 +++++------------
 .../util/NiFiTestAuthorizationProvider.java     |  36 ++--
 .../nifi/integration/util/NiFiTestServer.java   |   7 +-
 .../nifi/integration/util/NiFiTestUser.java     |  88 +++++-----
 .../nifi/web/docs/DocumentationController.java  |   2 +-
 .../nifi-framework/nifi-web/nifi-web-ui/pom.xml |  17 --
 85 files changed, 1165 insertions(+), 2553 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi-parent/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-parent/pom.xml b/nifi-parent/pom.xml
index bdb94c8..dbc31df 100644
--- a/nifi-parent/pom.xml
+++ b/nifi-parent/pom.xml
@@ -248,6 +248,14 @@
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-checkstyle-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>check-style</id>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
+                    </execution>
+                </executions>                
                 <configuration>
                     <checkstyleRules>
                         <module name="Checker">
@@ -354,38 +362,29 @@
                         </exclusions>
                     </dependency>
                 </dependencies>
-                <executions>
-                    <execution>
-                        <id>check-licenses</id>
-                        <goals>
-                            <goal>check</goal>
-                        </goals>
-                    </execution>
-                </executions>
             </plugin>             
         </plugins>
     </build>
     <profiles>
-        <profile> <!-- will move this up with the always on plugins once we get all checkstyle stuff resolved-->
-            <id>checkstyle</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
+        <profile>
+            <!-- Automatically check for licenses.  Too slow to always run.
+            Activate with -P check-licenses -->
+            <id>check-licenses</id>
             <build>
                 <plugins>
                     <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-checkstyle-plugin</artifactId>
+                        <groupId>org.apache.rat</groupId>
+                        <artifactId>apache-rat-plugin</artifactId>
                         <executions>
                             <execution>
-                                <id>check-style</id>
                                 <goals>
                                     <goal>check</goal>
                                 </goals>
+                                <phase>verify</phase>
                             </execution>
                         </executions>
                     </plugin>
-                </plugins>                
+                </plugins>
             </build>
         </profile>
     </profiles>
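
The net effect of this pom change is that the checkstyle check execution now runs on every build, while the Apache RAT license check moves behind an opt-in profile because it is too slow to run by default. Per the comment added above, the license check is enabled explicitly, for example:

    mvn clean install -P check-licenses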

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java
index 7f5f926..f1e31dd 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerAuditor.java
@@ -43,10 +43,10 @@ public class ControllerAuditor extends NiFiAuditor {
     /**
      * Audits updating the name of the controller.
      *
-     * @param proceedingJoinPoint
-     * @param name
-     * @param controllerFacade
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint join point
+     * @param name name
+     * @param controllerFacade facade
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.controller.ControllerFacade) && "
             + "execution(void setName(java.lang.String)) && "
@@ -96,10 +96,10 @@ public class ControllerAuditor extends NiFiAuditor {
     /**
      * Audits updating the comments of the controller.
      *
-     * @param proceedingJoinPoint
-     * @param comments
-     * @param controllerFacade
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint join point
+     * @param comments comments
+     * @param controllerFacade facade
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.controller.ControllerFacade) && "
             + "execution(void setComments(java.lang.String)) && "
@@ -147,13 +147,12 @@ public class ControllerAuditor extends NiFiAuditor {
     }
 
     /**
-     * Audits updating the max number of timer driven threads for the
-     * controller.
+     * Audits updating the max number of timer driven threads for the controller.
      *
-     * @param proceedingJoinPoint
-     * @param maxTimerDrivenThreadCount
-     * @param controllerFacade
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint joint point
+     * @param maxTimerDrivenThreadCount thread count
+     * @param controllerFacade facade
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.controller.ControllerFacade) && "
             + "execution(void setMaxTimerDrivenThreadCount(int)) && "
@@ -201,13 +200,12 @@ public class ControllerAuditor extends NiFiAuditor {
     }
 
     /**
-     * Audits updating the max number of event driven threads for the
-     * controller.
+     * Audits updating the max number of event driven threads for the controller.
      *
-     * @param proceedingJoinPoint
-     * @param maxEventDrivenThreadCount
-     * @param controllerFacade
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint join point
+     * @param maxEventDrivenThreadCount thread count
+     * @param controllerFacade facade
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.controller.ControllerFacade) && "
             + "execution(void setMaxEventDrivenThreadCount(int)) && "

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java
index a044b46..8ca3f0e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ControllerServiceAuditor.java
@@ -63,16 +63,12 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     /**
      * Audits the creation of controller service via createControllerService().
      *
-     * This method only needs to be run 'after returning'. However, in Java 7
-     * the order in which these methods are returned from
-     * Class.getDeclaredMethods (even though there is no order guaranteed) seems
-     * to differ from Java 6. SpringAOP depends on this ordering to determine
-     * advice precedence. By normalizing all advice into Around advice we can
-     * alleviate this issue.
+     * This method only needs to be run 'after returning'. However, in Java 7 the order in which these methods are returned from Class.getDeclaredMethods (even though there is no order guaranteed)
+     * seems to differ from Java 6. SpringAOP depends on this ordering to determine advice precedence. By normalizing all advice into Around advice we can alleviate this issue.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint join point
+     * @return node
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ControllerServiceDAO+) && "
             + "execution(org.apache.nifi.controller.service.ControllerServiceNode createControllerService(org.apache.nifi.web.api.dto.ControllerServiceDTO))")
@@ -94,11 +90,11 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     /**
      * Audits the configuration of a single controller service.
      *
-     * @param proceedingJoinPoint
-     * @param controllerServiceDTO
-     * @param controllerServiceDAO
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param controllerServiceDTO dto
+     * @param controllerServiceDAO dao
+     * @return object
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ControllerServiceDAO+) && "
             + "execution(org.apache.nifi.controller.service.ControllerServiceNode updateControllerService(org.apache.nifi.web.api.dto.ControllerServiceDTO)) && "
@@ -220,13 +216,14 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     /**
      * Audits the update of a component referencing a controller service.
      *
-     * @param proceedingJoinPoint
-     * @param controllerServiceId
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @return object
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ControllerServiceDAO+) && "
-            + "execution(org.apache.nifi.controller.service.ControllerServiceReference updateControllerServiceReferencingComponents(java.lang.String, org.apache.nifi.controller.ScheduledState, org.apache.nifi.controller.service.ControllerServiceState))")
+            + "execution(org.apache.nifi.controller.service.ControllerServiceReference "
+            + "updateControllerServiceReferencingComponents(java.lang.String, org.apache.nifi.controller.ScheduledState, "
+            + "org.apache.nifi.controller.service.ControllerServiceState))")
     public Object updateControllerServiceReferenceAdvice(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
         // update the controller service references
         final ControllerServiceReference controllerServiceReference = (ControllerServiceReference) proceedingJoinPoint.proceed();
@@ -255,12 +252,13 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     /**
      * Gets the update actions for all specified referencing components.
      *
-     * @param user
-     * @param actions
-     * @param visitedServices
-     * @param referencingComponents
+     * @param user user
+     * @param actions actions
+     * @param visitedServices services
+     * @param referencingComponents components
      */
-    private void getUpdateActionsForReferencingComponents(final NiFiUser user, final Collection<Action> actions, final Collection<String> visitedServices, final Set<ConfiguredComponent> referencingComponents) {
+    private void getUpdateActionsForReferencingComponents(
+            final NiFiUser user, final Collection<Action> actions, final Collection<String> visitedServices, final Set<ConfiguredComponent> referencingComponents) {
         // consider each component updates
         for (final ConfiguredComponent component : referencingComponents) {
             if (component instanceof ProcessorNode) {
@@ -329,10 +327,10 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     /**
      * Audits the removal of a controller service via deleteControllerService().
      *
-     * @param proceedingJoinPoint
-     * @param controllerServiceId
-     * @param controllerServiceDAO
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param controllerServiceId id
+     * @param controllerServiceDAO dao
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ControllerServiceDAO+) && "
             + "execution(void deleteControllerService(java.lang.String)) && "
@@ -358,9 +356,9 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of a controller service.
      *
-     * @param controllerService
-     * @param operation
-     * @return
+     * @param controllerService service
+     * @param operation operation
+     * @return action
      */
     private Action generateAuditRecord(ControllerServiceNode controllerService, Operation operation) {
         return generateAuditRecord(controllerService, operation, null);
@@ -369,10 +367,10 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of a controller service.
      *
-     * @param controllerService
-     * @param operation
-     * @param actionDetails
-     * @return
+     * @param controllerService service
+     * @param operation operation
+     * @param actionDetails details
+     * @return action
      */
     private Action generateAuditRecord(ControllerServiceNode controllerService, Operation operation, ActionDetails actionDetails) {
         Action action = null;
@@ -406,12 +404,11 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     }
 
     /**
-     * Extracts the values for the configured properties from the specified
-     * ControllerService.
+     * Extracts the values for the configured properties from the specified ControllerService.
      *
-     * @param controllerService
-     * @param controllerServiceDTO
-     * @return
+     * @param controllerService service
+     * @param controllerServiceDTO dto
+     * @return properties
      */
     private Map<String, String> extractConfiguredPropertyValues(ControllerServiceNode controllerService, ControllerServiceDTO controllerServiceDTO) {
         Map<String, String> values = new HashMap<>();
@@ -447,12 +444,11 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     }
 
     /**
-     * Locates the actual property descriptor for the given spec property
-     * descriptor.
+     * Locates the actual property descriptor for the given spec property descriptor.
      *
-     * @param propertyDescriptors
-     * @param specDescriptor
-     * @return
+     * @param propertyDescriptors descriptors
+     * @param specDescriptor example descriptor
+     * @return property
      */
     private PropertyDescriptor locatePropertyDescriptor(Set<PropertyDescriptor> propertyDescriptors, PropertyDescriptor specDescriptor) {
         for (PropertyDescriptor propertyDescriptor : propertyDescriptors) {
@@ -464,11 +460,10 @@ public class ControllerServiceAuditor extends NiFiAuditor {
     }
 
     /**
-     * Returns whether the specified controller service is disabled (or
-     * disabling).
+     * Returns whether the specified controller service is disabled (or disabling).
      *
-     * @param controllerService
-     * @return
+     * @param controllerService service
+     * @return whether the specified controller service is disabled (or disabling)
      */
     private boolean isDisabled(final ControllerServiceNode controllerService) {
         return ControllerServiceState.DISABLED.equals(controllerService.getState()) || ControllerServiceState.DISABLING.equals(controllerService.getState());
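
The javadoc reflowed above still carries the one real explanation in this class: the auditors use @Around advice even where 'after returning' semantics would do, because Spring AOP derives advice precedence from Class.getDeclaredMethods ordering, which shifted between Java 6 and Java 7. The pattern is simple to sketch; the aspect below is a simplified illustration rather than code from this commit, the pointcut is abbreviated with wildcards, and saveAuditRecord() is a placeholder for the Action-building and saveAction(...) calls the real auditors perform:

    import org.aspectj.lang.ProceedingJoinPoint;
    import org.aspectj.lang.annotation.Around;
    import org.aspectj.lang.annotation.Aspect;

    @Aspect
    public class ExampleAuditor {

        @Around("within(org.apache.nifi.web.dao.ControllerServiceDAO+) && "
                + "execution(* createControllerService(..))")
        public Object createAdvice(final ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
            // run the intercepted DAO method; if it throws, nothing below executes,
            // which is exactly the 'after returning' behavior this advice stands in for
            final Object result = proceedingJoinPoint.proceed();

            // audit only the successful outcome, then hand the result back unchanged
            saveAuditRecord(result);
            return result;
        }

        private void saveAuditRecord(final Object result) {
            // placeholder: the real auditors build an audit Action and call saveAction(action, logger)
        }
    }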

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/FunnelAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/FunnelAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/FunnelAuditor.java
index e96604c..e864c89 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/FunnelAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/FunnelAuditor.java
@@ -33,9 +33,6 @@ import org.aspectj.lang.annotation.Aspect;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/**
- *
- */
 @Aspect
 public class FunnelAuditor extends NiFiAuditor {
 
@@ -44,9 +41,9 @@ public class FunnelAuditor extends NiFiAuditor {
     /**
      * Audits the creation of a funnel.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @return funnel
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.FunnelDAO+) && "
             + "execution(org.apache.nifi.connectable.Funnel createFunnel(java.lang.String, org.apache.nifi.web.api.dto.FunnelDTO))")
@@ -68,11 +65,11 @@ public class FunnelAuditor extends NiFiAuditor {
     /**
      * Audits the removal of a funnel.
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param funnelId
-     * @param funnelDAO
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param funnelId funnel id
+     * @param funnelDAO funnel dao
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.FunnelDAO+) && "
             + "execution(void deleteFunnel(java.lang.String, java.lang.String)) && "
@@ -97,9 +94,9 @@ public class FunnelAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of the specified funnel.
      *
-     * @param funnel
-     * @param operation
-     * @return
+     * @param funnel funnel
+     * @param operation operation
+     * @return action
      */
     public Action generateAuditRecord(Funnel funnel, Operation operation) {
         return generateAuditRecord(funnel, operation, null);
@@ -108,10 +105,10 @@ public class FunnelAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of the specified funnel.
      *
-     * @param funnel
-     * @param operation
-     * @param actionDetails
-     * @return
+     * @param funnel funnel
+     * @param operation operation
+     * @param actionDetails details
+     * @return action
      */
     public Action generateAuditRecord(Funnel funnel, Operation operation, ActionDetails actionDetails) {
         Action action = null;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/NiFiAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/NiFiAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/NiFiAuditor.java
index e61a4a6..4477cce 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/NiFiAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/NiFiAuditor.java
@@ -41,8 +41,8 @@ public abstract class NiFiAuditor {
     /**
      * Records the specified action.
      *
-     * @param action
-     * @param logger
+     * @param action action
+     * @param logger logger
      */
     protected void saveAction(Action action, Logger logger) {
         final Collection<Action> actions = new ArrayList<>();
@@ -53,8 +53,8 @@ public abstract class NiFiAuditor {
     /**
      * Records the actions.
      *
-     * @param actions
-     * @param logger
+     * @param actions actions
+     * @param logger logger
      */
     protected void saveActions(Collection<Action> actions, Logger logger) {
         ClusterContext ctx = ClusterContextThreadLocal.getContext();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/PortAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/PortAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/PortAuditor.java
index 479842c..2df883d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/PortAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/PortAuditor.java
@@ -43,9 +43,6 @@ import org.aspectj.lang.annotation.Aspect;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/**
- *
- */
 @Aspect
 public class PortAuditor extends NiFiAuditor {
 
@@ -54,9 +51,9 @@ public class PortAuditor extends NiFiAuditor {
     /**
      * Audits the creation of a port.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @return port
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.PortDAO+) && "
             + "execution(org.apache.nifi.connectable.Port createPort(java.lang.String, org.apache.nifi.web.api.dto.PortDTO))")
@@ -78,12 +75,12 @@ public class PortAuditor extends NiFiAuditor {
     /**
      * Audits the update of a port.
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param portDTO
-     * @param portDAO
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param portDTO port dto
+     * @param portDAO port dao
+     * @return port
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.PortDAO+) && "
             + "execution(org.apache.nifi.connectable.Port updatePort(java.lang.String, org.apache.nifi.web.api.dto.PortDTO)) && "
@@ -263,11 +260,11 @@ public class PortAuditor extends NiFiAuditor {
     /**
      * Audits the removal of a processor via deleteProcessor().
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param portId
-     * @param portDAO
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param portId port id
+     * @param portDAO port dao
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.PortDAO+) && "
             + "execution(void deletePort(java.lang.String, java.lang.String)) && "
@@ -293,9 +290,9 @@ public class PortAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of the specified port.
      *
-     * @param port
-     * @param operation
-     * @return
+     * @param port port
+     * @param operation operation
+     * @return action
      */
     public Action generateAuditRecord(Port port, Operation operation) {
         return generateAuditRecord(port, operation, null);
@@ -304,10 +301,10 @@ public class PortAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of the specified port.
      *
-     * @param port
-     * @param operation
-     * @param actionDetails
-     * @return
+     * @param port port
+     * @param operation operation
+     * @param actionDetails details
+     * @return action
      */
     public Action generateAuditRecord(Port port, Operation operation, ActionDetails actionDetails) {
         Action action = null;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessGroupAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessGroupAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessGroupAuditor.java
index 7acf4e2..fa5ac70 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessGroupAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessGroupAuditor.java
@@ -47,16 +47,12 @@ public class ProcessGroupAuditor extends NiFiAuditor {
     /**
      * Audits the creation of process groups via createProcessGroup().
      *
-     * This method only needs to be run 'after returning'. However, in Java 7
-     * the order in which these methods are returned from
-     * Class.getDeclaredMethods (even though there is no order guaranteed) seems
-     * to differ from Java 6. SpringAOP depends on this ordering to determine
-     * advice precedence. By normalizing all advice into Around advice we can
-     * alleviate this issue.
+     * This method only needs to be run 'after returning'. However, in Java 7 the order in which these methods are returned from Class.getDeclaredMethods (even though there is no order guaranteed)
+     * seems to differ from Java 6. SpringAOP depends on this ordering to determine advice precedence. By normalizing all advice into Around advice we can alleviate this issue.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint join point
+     * @return group
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ProcessGroupDAO+) && "
             + "execution(org.apache.nifi.groups.ProcessGroup createProcessGroup(java.lang.String, org.apache.nifi.web.api.dto.ProcessGroupDTO))")
@@ -79,10 +75,10 @@ public class ProcessGroupAuditor extends NiFiAuditor {
     /**
      * Audits the update of process group configuration.
      *
-     * @param proceedingJoinPoint
-     * @param processGroupDTO
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param processGroupDTO dto
+     * @return group
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ProcessGroupDAO+) && "
             + "execution(org.apache.nifi.groups.ProcessGroup updateProcessGroup(org.apache.nifi.web.api.dto.ProcessGroupDTO)) && "
@@ -190,9 +186,9 @@ public class ProcessGroupAuditor extends NiFiAuditor {
     /**
      * Audits the removal of a process group via deleteProcessGroup().
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ProcessGroupDAO+) && "
             + "execution(void deleteProcessGroup(java.lang.String)) && "
@@ -218,9 +214,9 @@ public class ProcessGroupAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of a process group.
      *
-     * @param processGroup
-     * @param operation
-     * @return
+     * @param processGroup group
+     * @param operation operation
+     * @return action
      */
     public Action generateAuditRecord(ProcessGroup processGroup, Operation operation) {
         return generateAuditRecord(processGroup, operation, null);
@@ -229,10 +225,10 @@ public class ProcessGroupAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of a process group.
      *
-     * @param processGroup
-     * @param operation
-     * @param actionDetails
-     * @return
+     * @param processGroup group
+     * @param operation operation
+     * @param actionDetails details
+     * @return action
      */
     public Action generateAuditRecord(ProcessGroup processGroup, Operation operation, ActionDetails actionDetails) {
         Action action = null;
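
The Javadoc note repeated across these auditors explains why advice that only needs to observe a successful return is still written as @Around: Spring AOP's advice precedence depends on the order Class.getDeclaredMethods happens to return, which shifted between Java 6 and Java 7, so normalizing everything to one advice style sidesteps the problem. A sketch of the two equivalent styles, with an assumed pointcut used purely for illustration:

    import org.aspectj.lang.ProceedingJoinPoint;
    import org.aspectj.lang.annotation.AfterReturning;
    import org.aspectj.lang.annotation.Around;
    import org.aspectj.lang.annotation.Aspect;

    @Aspect
    public class AdviceStyleSketch {

        // "after returning" style: runs only when the target method completes normally
        @AfterReturning(pointcut = "execution(* com.example.dao.*DAO.create*(..))", returning = "created")
        public void auditAfterReturning(final Object created) {
            System.out.println("created: " + created);
        }

        // the same behavior normalized into Around advice: proceed, then audit
        @Around("execution(* com.example.dao.*DAO.create*(..))")
        public Object auditAround(final ProceedingJoinPoint joinPoint) throws Throwable {
            final Object created = joinPoint.proceed();   // rethrows on failure, so nothing is audited
            System.out.println("created: " + created);
            return created;
        }
    }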

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessorAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessorAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessorAuditor.java
index 0a9f857..23df486 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessorAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ProcessorAuditor.java
@@ -71,16 +71,12 @@ public class ProcessorAuditor extends NiFiAuditor {
     /**
      * Audits the creation of processors via createProcessor().
      *
-     * This method only needs to be run 'after returning'. However, in Java 7
-     * the order in which these methods are returned from
-     * Class.getDeclaredMethods (even though there is no order guaranteed) seems
-     * to differ from Java 6. SpringAOP depends on this ordering to determine
-     * advice precedence. By normalizing all advice into Around advice we can
-     * alleviate this issue.
+     * This method only needs to be run 'after returning'. However, in Java 7 the order in which these methods are returned from Class.getDeclaredMethods (even though there is no order guaranteed)
+     * seems to differ from Java 6. SpringAOP depends on this ordering to determine advice precedence. By normalizing all advice into Around advice we can alleviate this issue.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint join point
+     * @return node
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ProcessorDAO+) && "
             + "execution(org.apache.nifi.controller.ProcessorNode createProcessor(java.lang.String, org.apache.nifi.web.api.dto.ProcessorDTO))")
@@ -102,12 +98,12 @@ public class ProcessorAuditor extends NiFiAuditor {
     /**
      * Audits the configuration of a single processor.
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param processorDTO
-     * @param processorDAO
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param processorDTO dto
+     * @param processorDAO dao
+     * @return node
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ProcessorDAO+) && "
             + "execution(org.apache.nifi.controller.ProcessorNode updateProcessor(java.lang.String, org.apache.nifi.web.api.dto.ProcessorDTO)) && "
@@ -237,11 +233,11 @@ public class ProcessorAuditor extends NiFiAuditor {
     /**
      * Audits the removal of a processor via deleteProcessor().
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param processorId
-     * @param processorDAO
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param processorId processor id
+     * @param processorDAO dao
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ProcessorDAO+) && "
             + "execution(void deleteProcessor(java.lang.String, java.lang.String)) && "
@@ -267,9 +263,9 @@ public class ProcessorAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of a processor.
      *
-     * @param processor
-     * @param operation
-     * @return
+     * @param processor processor
+     * @param operation operation
+     * @return action
      */
     public Action generateAuditRecord(ProcessorNode processor, Operation operation) {
         return generateAuditRecord(processor, operation, null);
@@ -278,10 +274,10 @@ public class ProcessorAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of a processor.
      *
-     * @param processor
-     * @param operation
-     * @param actionDetails
-     * @return
+     * @param processor processor
+     * @param operation operation
+     * @param actionDetails details
+     * @return action
      */
     public Action generateAuditRecord(ProcessorNode processor, Operation operation, ActionDetails actionDetails) {
         Action action = null;
@@ -315,12 +311,7 @@ public class ProcessorAuditor extends NiFiAuditor {
     }
 
     /**
-     * Extracts the values for the configured properties from the specified
-     * Processor.
-     *
-     * @param processor
-     * @param processorDTO
-     * @return
+     * Extracts the values for the configured properties from the specified Processor.
      */
     private Map<String, String> extractConfiguredPropertyValues(ProcessorNode processor, ProcessorDTO processorDTO) {
         Map<String, String> values = new HashMap<>();
@@ -389,12 +380,11 @@ public class ProcessorAuditor extends NiFiAuditor {
     }
 
     /**
-     * Locates the actual property descriptor for the given spec property
-     * descriptor.
+     * Locates the actual property descriptor for the given spec property descriptor.
      *
-     * @param propertyDescriptors
-     * @param specDescriptor
-     * @return
+     * @param propertyDescriptors properties
+     * @param specDescriptor example property
+     * @return property
      */
     private PropertyDescriptor locatePropertyDescriptor(Set<PropertyDescriptor> propertyDescriptors, PropertyDescriptor specDescriptor) {
         for (PropertyDescriptor propertyDescriptor : propertyDescriptors) {
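
locatePropertyDescriptor(...) above resolves a descriptor built from the incoming DTO against the component's declared descriptors. A guess at the shape of that lookup, matching on the descriptor name and falling back to the spec descriptor for unknown or dynamic properties; the Descriptor class below is a stand-in, not NiFi's PropertyDescriptor:

    import java.util.HashSet;
    import java.util.Set;

    public class DescriptorLookupSketch {

        static final class Descriptor {
            final String name;
            final boolean sensitive;
            Descriptor(String name, boolean sensitive) { this.name = name; this.sensitive = sensitive; }
        }

        static Descriptor locate(Set<Descriptor> declared, Descriptor spec) {
            for (Descriptor descriptor : declared) {
                if (descriptor.name.equals(spec.name)) {
                    return descriptor;   // prefer the component's own definition when one exists
                }
            }
            return spec;                 // unknown or dynamic property: keep the spec descriptor
        }

        public static void main(String[] args) {
            Set<Descriptor> declared = new HashSet<>();
            declared.add(new Descriptor("Password", true));
            // the declared descriptor wins, so the audit knows the property is sensitive
            System.out.println(locate(declared, new Descriptor("Password", false)).sensitive);   // true
        }
    }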

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RelationshipAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RelationshipAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RelationshipAuditor.java
index b651904..c55a1c6 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RelationshipAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RelationshipAuditor.java
@@ -69,16 +69,12 @@ public class RelationshipAuditor extends NiFiAuditor {
     /**
      * Audits the creation of relationships via createConnection().
      *
-     * This method only needs to be run 'after returning'. However, in Java 7
-     * the order in which these methods are returned from
-     * Class.getDeclaredMethods (even though there is no order guaranteed) seems
-     * to differ from Java 6. SpringAOP depends on this ordering to determine
-     * advice precedence. By normalizing all advice into Around advice we can
-     * alleviate this issue.
+     * This method only needs to be run 'after returning'. However, in Java 7 the order in which these methods are returned from Class.getDeclaredMethods (even though there is no order guaranteed)
+     * seems to differ from Java 6. SpringAOP depends on this ordering to determine advice precedence. By normalizing all advice into Around advice we can alleviate this issue.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint join point
+     * @return connection
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ConnectionDAO+) && "
             + "execution(org.apache.nifi.connectable.Connection createConnection(java.lang.String, org.apache.nifi.web.api.dto.ConnectionDTO))")
@@ -101,12 +97,12 @@ public class RelationshipAuditor extends NiFiAuditor {
     /**
      * Audits the creation and removal of relationships via updateConnection().
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param connectionDTO
-     * @param connectionDAO
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param connectionDTO dto
+     * @param connectionDAO dao
+     * @return connection
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ConnectionDAO+) && "
             + "execution(org.apache.nifi.connectable.Connection updateConnection(java.lang.String, org.apache.nifi.web.api.dto.ConnectionDTO)) && "
@@ -215,11 +211,11 @@ public class RelationshipAuditor extends NiFiAuditor {
     /**
      * Audits the removal of relationships via deleteConnection().
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param id
-     * @param connectionDAO
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param id id
+     * @param connectionDAO dao
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ConnectionDAO+) && "
             + "execution(void deleteConnection(java.lang.String, java.lang.String)) && "
@@ -249,11 +245,11 @@ public class RelationshipAuditor extends NiFiAuditor {
     /**
      * Creates action details for connect/disconnect actions.
      *
-     * @param connection
-     * @param source
-     * @param relationships
-     * @param destination
-     * @return
+     * @param connection connection
+     * @param source source
+     * @param relationships relationships
+     * @param destination destinations
+     * @return details
      */
     public ConnectDetails createConnectDetails(final Connection connection, final Connectable source, final Collection<Relationship> relationships, final Connectable destination) {
         final Component sourceType = determineConnectableType(source);
@@ -279,12 +275,11 @@ public class RelationshipAuditor extends NiFiAuditor {
     }
 
     /**
-     * Extracts configured settings from the specified connection only if they
-     * have also been specified in the connectionDTO.
+     * Extracts configured settings from the specified connection only if they have also been specified in the connectionDTO.
      *
-     * @param connection
-     * @param connectionDTO
-     * @return
+     * @param connection connection
+     * @param connectionDTO dto
+     * @return properties
      */
     private Map<String, String> extractConfiguredPropertyValues(Connection connection, ConnectionDTO connectionDTO) {
         Map<String, String> values = new HashMap<>();
@@ -315,9 +310,9 @@ public class RelationshipAuditor extends NiFiAuditor {
     /**
      * Generates the audit records for the specified connection.
      *
-     * @param connection
-     * @param operation
-     * @return
+     * @param connection connection
+     * @param operation operation
+     * @return action
      */
     public Action generateAuditRecordForConnection(Connection connection, Operation operation) {
         return generateAuditRecordForConnection(connection, operation, null);
@@ -326,10 +321,10 @@ public class RelationshipAuditor extends NiFiAuditor {
     /**
      * Generates the audit records for the specified connection.
      *
-     * @param connection
-     * @param operation
-     * @param actionDetails
-     * @return
+     * @param connection connection
+     * @param operation operation
+     * @param actionDetails details
+     * @return action
      */
     public Action generateAuditRecordForConnection(Connection connection, Operation operation, ActionDetails actionDetails) {
         Action action = null;
@@ -374,9 +369,6 @@ public class RelationshipAuditor extends NiFiAuditor {
 
     /**
      * Determines the type of component the specified connectable is.
-     *
-     * @param connectable
-     * @return
      */
     private Component determineConnectableType(Connectable connectable) {
         String sourceId = connectable.getIdentifier();
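
extractConfiguredPropertyValues(...) above snapshots the connection's current settings, but only for fields the ConnectionDTO actually carries, so the before/after comparison recorded in the audit covers exactly what the request changed. A small sketch of that idea; the types and property names are made-up stand-ins:

    import java.util.HashMap;
    import java.util.Map;

    public class ConfiguredValuesSketch {

        // hypothetical stand-ins for the Connection / ConnectionDTO pair
        static final class Conn { String name = "success"; Long flowFileExpiration = 60000L; }
        static final class ConnDTO { String name; Long flowFileExpiration; }

        static Map<String, String> extract(Conn connection, ConnDTO dto) {
            Map<String, String> values = new HashMap<>();
            if (dto.name != null) {                        // only capture what the request touched
                values.put("Name", connection.name);
            }
            if (dto.flowFileExpiration != null) {
                values.put("FlowFile Expiration", String.valueOf(connection.flowFileExpiration));
            }
            return values;
        }

        public static void main(String[] args) {
            ConnDTO dto = new ConnDTO();
            dto.name = "failure";                          // the request only renames the connection
            System.out.println(extract(new Conn(), dto));  // {Name=success}: the pre-update value
        }
    }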

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RemoteProcessGroupAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RemoteProcessGroupAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RemoteProcessGroupAuditor.java
index ba9e629..f201fe2 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RemoteProcessGroupAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/RemoteProcessGroupAuditor.java
@@ -50,19 +50,14 @@ public class RemoteProcessGroupAuditor extends NiFiAuditor {
     private static final Logger logger = LoggerFactory.getLogger(RemoteProcessGroupAuditor.class);
 
     /**
-     * Audits the creation of remote process groups via
-     * createRemoteProcessGroup().
+     * Audits the creation of remote process groups via createRemoteProcessGroup().
      *
-     * This method only needs to be run 'after returning'. However, in Java 7
-     * the order in which these methods are returned from
-     * Class.getDeclaredMethods (even though there is no order guaranteed) seems
-     * to differ from Java 6. SpringAOP depends on this ordering to determine
-     * advice precedence. By normalizing all advice into Around advice we can
-     * alleviate this issue.
+     * This method only needs to be run 'after returning'. However, in Java 7 the order in which these methods are returned from Class.getDeclaredMethods (even though there is no order guaranteed)
+     * seems to differ from Java 6. SpringAOP depends on this ordering to determine advice precedence. By normalizing all advice into Around advice we can alleviate this issue.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint join point
+     * @return group
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.RemoteProcessGroupDAO+) && "
             + "execution(org.apache.nifi.groups.RemoteProcessGroup createRemoteProcessGroup(java.lang.String, org.apache.nifi.web.api.dto.RemoteProcessGroupDTO))")
@@ -85,18 +80,19 @@ public class RemoteProcessGroupAuditor extends NiFiAuditor {
     /**
      * Audits the update of remote process group configuration.
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param remoteProcessGroupDTO
-     * @param remoteProcessGroupDAO
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param remoteProcessGroupDTO dto
+     * @param remoteProcessGroupDAO dao
+     * @return group
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.RemoteProcessGroupDAO+) && "
             + "execution(org.apache.nifi.groups.RemoteProcessGroup updateRemoteProcessGroup(java.lang.String, org.apache.nifi.web.api.dto.RemoteProcessGroupDTO)) && "
             + "args(groupId, remoteProcessGroupDTO) && "
             + "target(remoteProcessGroupDAO)")
-    public RemoteProcessGroup auditUpdateProcessGroupConfiguration(ProceedingJoinPoint proceedingJoinPoint, String groupId, RemoteProcessGroupDTO remoteProcessGroupDTO, RemoteProcessGroupDAO remoteProcessGroupDAO) throws Throwable {
+    public RemoteProcessGroup auditUpdateProcessGroupConfiguration(
+            ProceedingJoinPoint proceedingJoinPoint, String groupId, RemoteProcessGroupDTO remoteProcessGroupDTO, RemoteProcessGroupDAO remoteProcessGroupDAO) throws Throwable {
         final RemoteProcessGroup remoteProcessGroup = remoteProcessGroupDAO.getRemoteProcessGroup(groupId, remoteProcessGroupDTO.getId());
 
         // record the current value of this remoteProcessGroups configuration for comparisons later
@@ -300,11 +296,11 @@ public class RemoteProcessGroupAuditor extends NiFiAuditor {
     /**
      * Audits the removal of a process group via deleteProcessGroup().
      *
-     * @param proceedingJoinPoint
-     * @param groupId
-     * @param remoteProcessGroupId
-     * @param remoteProcessGroupDAO
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param groupId group id
+     * @param remoteProcessGroupId remote group id
+     * @param remoteProcessGroupDAO remote group dao
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.RemoteProcessGroupDAO+) && "
             + "execution(void deleteRemoteProcessGroup(java.lang.String, java.lang.String)) && "
@@ -329,9 +325,9 @@ public class RemoteProcessGroupAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the specified remote process group.
      *
-     * @param remoteProcessGroup
-     * @param operation
-     * @return
+     * @param remoteProcessGroup group
+     * @param operation operation
+     * @return action
      */
     public Action generateAuditRecord(RemoteProcessGroup remoteProcessGroup, Operation operation) {
         return generateAuditRecord(remoteProcessGroup, operation, null);
@@ -340,10 +336,10 @@ public class RemoteProcessGroupAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the specified remote process group.
      *
-     * @param remoteProcessGroup
-     * @param operation
-     * @param actionDetails
-     * @return
+     * @param remoteProcessGroup group
+     * @param operation operation
+     * @param actionDetails details
+     * @return action
      */
     public Action generateAuditRecord(RemoteProcessGroup remoteProcessGroup, Operation operation, ActionDetails actionDetails) {
         Action action = null;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ReportingTaskAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ReportingTaskAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ReportingTaskAuditor.java
index 38aaf1f..bad91ec 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ReportingTaskAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/ReportingTaskAuditor.java
@@ -56,16 +56,12 @@ public class ReportingTaskAuditor extends NiFiAuditor {
     /**
      * Audits the creation of reporting task via createReportingTask().
      *
-     * This method only needs to be run 'after returning'. However, in Java 7
-     * the order in which these methods are returned from
-     * Class.getDeclaredMethods (even though there is no order guaranteed) seems
-     * to differ from Java 6. SpringAOP depends on this ordering to determine
-     * advice precedence. By normalizing all advice into Around advice we can
-     * alleviate this issue.
+     * This method only needs to be run 'after returning'. However, in Java 7 the order in which these methods are returned from Class.getDeclaredMethods (even though there is no order guaranteed)
+     * seems to differ from Java 6. SpringAOP depends on this ordering to determine advice precedence. By normalizing all advice into Around advice we can alleviate this issue.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws java.lang.Throwable
+     * @param proceedingJoinPoint joinpoint
+     * @return node
+     * @throws java.lang.Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ReportingTaskDAO+) && "
             + "execution(org.apache.nifi.controller.ReportingTaskNode createReportingTask(org.apache.nifi.web.api.dto.ReportingTaskDTO))")
@@ -87,11 +83,11 @@ public class ReportingTaskAuditor extends NiFiAuditor {
     /**
      * Audits the configuration of a reporting task.
      *
-     * @param proceedingJoinPoint
-     * @param reportingTaskDTO
-     * @param reportingTaskDAO
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint joinpoint
+     * @param reportingTaskDTO dto
+     * @param reportingTaskDAO dao
+     * @return object
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ReportingTaskDAO+) && "
             + "execution(org.apache.nifi.controller.ReportingTaskNode updateReportingTask(org.apache.nifi.web.api.dto.ReportingTaskDTO)) && "
@@ -220,10 +216,10 @@ public class ReportingTaskAuditor extends NiFiAuditor {
     /**
      * Audits the removal of a reporting task via deleteReportingTask().
      *
-     * @param proceedingJoinPoint
-     * @param reportingTaskId
-     * @param reportingTaskDAO
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param reportingTaskId task id
+     * @param reportingTaskDAO task dao
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.ReportingTaskDAO+) && "
             + "execution(void deleteReportingTask(java.lang.String)) && "
@@ -249,9 +245,9 @@ public class ReportingTaskAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of a reporting task.
      *
-     * @param reportingTask
-     * @param operation
-     * @return
+     * @param reportingTask task
+     * @param operation operation
+     * @return action
      */
     public Action generateAuditRecord(ReportingTaskNode reportingTask, Operation operation) {
         return generateAuditRecord(reportingTask, operation, null);
@@ -260,10 +256,10 @@ public class ReportingTaskAuditor extends NiFiAuditor {
     /**
      * Generates an audit record for the creation of a reporting task.
      *
-     * @param reportingTask
-     * @param operation
-     * @param actionDetails
-     * @return
+     * @param reportingTask task
+     * @param operation operation
+     * @param actionDetails details
+     * @return action
      */
     public Action generateAuditRecord(ReportingTaskNode reportingTask, Operation operation, ActionDetails actionDetails) {
         Action action = null;
@@ -297,12 +293,11 @@ public class ReportingTaskAuditor extends NiFiAuditor {
     }
 
     /**
-     * Extracts the values for the configured properties from the specified
-     * ReportingTask.
+     * Extracts the values for the configured properties from the specified ReportingTask.
      *
-     * @param reportingTask
-     * @param reportingTaskDTO
-     * @return
+     * @param reportingTask task
+     * @param reportingTaskDTO dto
+     * @return properties of task
      */
     private Map<String, String> extractConfiguredPropertyValues(ReportingTaskNode reportingTask, ReportingTaskDTO reportingTaskDTO) {
         Map<String, String> values = new HashMap<>();
@@ -335,12 +330,11 @@ public class ReportingTaskAuditor extends NiFiAuditor {
     }
 
     /**
-     * Locates the actual property descriptor for the given spec property
-     * descriptor.
+     * Locates the actual property descriptor for the given spec property descriptor.
      *
-     * @param propertyDescriptors
-     * @param specDescriptor
-     * @return
+     * @param propertyDescriptors properties
+     * @param specDescriptor example property
+     * @return property
      */
     private PropertyDescriptor locatePropertyDescriptor(Set<PropertyDescriptor> propertyDescriptors, PropertyDescriptor specDescriptor) {
         for (PropertyDescriptor propertyDescriptor : propertyDescriptors) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/SnippetAuditor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/SnippetAuditor.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/SnippetAuditor.java
index 38769bb..20ce740 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/SnippetAuditor.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/audit/SnippetAuditor.java
@@ -88,9 +88,9 @@ public class SnippetAuditor extends NiFiAuditor {
     /**
      * Audits copy/paste.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @return dto
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.SnippetDAO+) && "
             + "execution(org.apache.nifi.web.api.dto.FlowSnippetDTO copySnippet(java.lang.String, java.lang.String, java.lang.Double, java.lang.Double))")
@@ -104,9 +104,9 @@ public class SnippetAuditor extends NiFiAuditor {
     /**
      * Audits the instantiation of a template.
      *
-     * @param proceedingJoinPoint
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @return dto
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.TemplateDAO+) && "
             + "execution(org.apache.nifi.web.api.dto.FlowSnippetDTO instantiateTemplate(java.lang.String, java.lang.Double, java.lang.Double, java.lang.String))")
@@ -119,8 +119,6 @@ public class SnippetAuditor extends NiFiAuditor {
 
     /**
      * Audits the specified snippet.
-     *
-     * @param snippet
      */
     private void auditSnippet(final FlowSnippetDTO snippet) {
         final Collection<Action> actions = new ArrayList<>();
@@ -199,9 +197,6 @@ public class SnippetAuditor extends NiFiAuditor {
 
     /**
      * Determines the type of component the specified connectable is.
-     *
-     * @param connectable
-     * @return
      */
     private Component determineConnectableType(ConnectableDTO connectable) {
         Component component = Component.Controller;
@@ -224,11 +219,6 @@ public class SnippetAuditor extends NiFiAuditor {
 
     /**
      * Generates an audit record for the creation of the specified funnel.
-     *
-     * @param id
-     * @param name
-     * @param type
-     * @param operation
      */
     private Action generateAuditRecord(String id, String name, Component type, Operation operation, Date timestamp) {
         Action action = null;
@@ -255,11 +245,11 @@ public class SnippetAuditor extends NiFiAuditor {
     /**
      * Audits a bulk move.
      *
-     * @param proceedingJoinPoint
-     * @param snippetDTO
-     * @param snippetDAO
-     * @return
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param snippetDTO dto
+     * @param snippetDAO dao
+     * @return snippet
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.SnippetDAO+) && "
             + "execution(org.apache.nifi.controller.Snippet updateSnippet(org.apache.nifi.web.api.dto.SnippetDTO)) && "
@@ -349,10 +339,10 @@ public class SnippetAuditor extends NiFiAuditor {
     /**
      * Audits bulk delete.
      *
-     * @param proceedingJoinPoint
-     * @param snippetId
-     * @param snippetDAO
-     * @throws Throwable
+     * @param proceedingJoinPoint join point
+     * @param snippetId snippet id
+     * @param snippetDAO dao
+     * @throws Throwable ex
      */
     @Around("within(org.apache.nifi.web.dao.SnippetDAO+) && "
             + "execution(void deleteSnippet(java.lang.String)) && "

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/IllegalClusterResourceRequestException.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/IllegalClusterResourceRequestException.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/IllegalClusterResourceRequestException.java
index 1336a14..fa65504 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/IllegalClusterResourceRequestException.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/IllegalClusterResourceRequestException.java
@@ -17,8 +17,7 @@
 package org.apache.nifi.web;
 
 /**
- * Exception indicating a clustering-related request was issued to a node when
- * it should have been issued to the cluster manager.
+ * Exception indicating a clustering-related request was issued to a node when it should have been issued to the cluster manager.
  */
 @SuppressWarnings("serial")
 public class IllegalClusterResourceRequestException extends RuntimeException {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java
index fbd4742..9436fb8 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java
@@ -545,7 +545,8 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
     }
 
     @Override
-    public ConfigurationSnapshot<RemoteProcessGroupPortDTO> updateRemoteProcessGroupInputPort(final Revision revision, final String groupId, final String remoteProcessGroupId, final RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
+    public ConfigurationSnapshot<RemoteProcessGroupPortDTO> updateRemoteProcessGroupInputPort(
+            final Revision revision, final String groupId, final String remoteProcessGroupId, final RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
         return optimisticLockingManager.configureFlow(revision, new ConfigurationRequest<RemoteProcessGroupPortDTO>() {
             @Override
             public RemoteProcessGroupPortDTO execute() {
@@ -561,7 +562,8 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
     }
 
     @Override
-    public ConfigurationSnapshot<RemoteProcessGroupPortDTO> updateRemoteProcessGroupOutputPort(final Revision revision, final String groupId, final String remoteProcessGroupId, final RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
+    public ConfigurationSnapshot<RemoteProcessGroupPortDTO> updateRemoteProcessGroupOutputPort(
+            final Revision revision, final String groupId, final String remoteProcessGroupId, final RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
         return optimisticLockingManager.configureFlow(revision, new ConfigurationRequest<RemoteProcessGroupPortDTO>() {
             @Override
             public RemoteProcessGroupPortDTO execute() {
@@ -1237,7 +1239,11 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
     }
 
     @Override
-    public ConfigurationSnapshot<Set<ControllerServiceReferencingComponentDTO>> updateControllerServiceReferencingComponents(final Revision revision, final String controllerServiceId, final org.apache.nifi.controller.ScheduledState scheduledState, final org.apache.nifi.controller.service.ControllerServiceState controllerServiceState) {
+    public ConfigurationSnapshot<Set<ControllerServiceReferencingComponentDTO>> updateControllerServiceReferencingComponents(
+            final Revision revision,
+            final String controllerServiceId,
+            final org.apache.nifi.controller.ScheduledState scheduledState,
+            final org.apache.nifi.controller.service.ControllerServiceState controllerServiceState) {
         return optimisticLockingManager.configureFlow(revision, new ConfigurationRequest<Set<ControllerServiceReferencingComponentDTO>>() {
             @Override
             public Set<ControllerServiceReferencingComponentDTO> execute() {
@@ -1368,7 +1374,7 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
         try {
             userService.invalidateUserAccount(userId);
         } catch (final AccountNotFoundException anfe) {
-            // ignore 
+            // ignore
         }
     }
 
@@ -1774,7 +1780,7 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
         final List<Bulletin> results = bulletinRepository.findBulletins(queryBuilder.build());
 
         // perform the query and generate the results - iterating in reverse order since we are
-        // getting the most recent results by ordering by timestamp desc above. this gets the 
+        // getting the most recent results by ordering by timestamp desc above. this gets the
         // exact results we want but in reverse order
         final List<BulletinDTO> bulletins = new ArrayList<>();
         for (final ListIterator<Bulletin> bulletinIter = results.listIterator(results.size()); bulletinIter.hasPrevious();) {
@@ -1806,10 +1812,6 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
 
     /**
      * Ensures the specified user has permission to access the specified port.
-     *
-     * @param user
-     * @param port
-     * @return
      */
     private boolean isUserAuthorized(final NiFiUser user, final RootGroupPort port) {
         final boolean isSiteToSiteSecure = Boolean.TRUE.equals(properties.isSiteToSiteSecure());
@@ -1840,7 +1842,7 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
             throw new WebApplicationException(new Throwable("Unable to access details for current user."));
         }
 
-        // at this point we know that the user must have ROLE_NIFI because it's required 
+        // at this point we know that the user must have ROLE_NIFI because it's required
         // get to the endpoint that calls this method but we'll check again anyways
         final Set<Authority> authorities = user.getAuthorities();
         if (!authorities.contains(Authority.ROLE_NIFI)) {
@@ -2976,10 +2978,6 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
 
     /**
      * Utility method to get the oldest of the two specified dates.
-     *
-     * @param date1
-     * @param date2
-     * @return
      */
     private Date getOldestDate(final Date date1, final Date date2) {
         if (date1 == null && date2 == null) {
@@ -3001,10 +2999,6 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
 
     /**
      * Utility method to get the newest of the two specified dates.
-     *
-     * @param date1
-     * @param date2
-     * @return
      */
     private Date getNewestDate(final Date date1, final Date date2) {
         if (date1 == null && date2 == null) {
@@ -3025,11 +3019,7 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
     }
 
     /**
-     * Utility method for extracting component counts from the specified group
-     * status.
-     *
-     * @param groupStatus
-     * @return
+     * Utility method for extracting component counts from the specified group status.
      */
     private ProcessGroupCounts extractProcessGroupCounts(ProcessGroupStatus groupStatus) {
         int running = 0;
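
The facade methods rewrapped above all funnel their work through optimisticLockingManager.configureFlow(revision, new ConfigurationRequest<...>() { ... }). The sketch below shows the general shape of that pattern: verify the client-supplied revision, apply the change inside the callback, and return a snapshot carrying the incremented revision. All of the types here are simplified stand-ins, not NiFi's actual OptimisticLockingManager API.

    public class OptimisticLockingSketch {

        interface ConfigurationRequest<T> { T execute(); }

        static final class Snapshot<T> {
            final long version;
            final T value;
            Snapshot(long version, T value) { this.version = version; this.value = value; }
        }

        static final class LockingManager {
            private long currentVersion = 0;

            synchronized <T> Snapshot<T> configureFlow(long clientVersion, ConfigurationRequest<T> request) {
                if (clientVersion != currentVersion) {
                    // the client edited against a stale copy of the flow
                    throw new IllegalStateException("stale revision: " + clientVersion);
                }
                T result = request.execute();   // apply the change while holding the lock
                currentVersion++;               // bump the flow revision
                return new Snapshot<>(currentVersion, result);
            }
        }

        public static void main(String[] args) {
            LockingManager manager = new LockingManager();
            Snapshot<String> snapshot = manager.configureFlow(0, new ConfigurationRequest<String>() {
                @Override
                public String execute() {
                    return "port updated";
                }
            });
            System.out.println(snapshot.version + ": " + snapshot.value);   // 1: port updated
        }
    }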

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebConfigurationContext.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebConfigurationContext.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebConfigurationContext.java
index a1cfcd5..870ba56 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebConfigurationContext.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebConfigurationContext.java
@@ -68,8 +68,7 @@ import org.apache.nifi.web.api.entity.ReportingTaskEntity;
 import org.apache.nifi.web.util.ClientResponseUtils;
 
 /**
- * Implements the NiFiWebConfigurationContext interface to support a context in
- * both standalone and clustered environments.
+ * Implements the NiFiWebConfigurationContext interface to support a context in both standalone and clustered environments.
  */
 public class StandardNiFiWebConfigurationContext implements NiFiWebConfigurationContext {
 
@@ -95,7 +94,7 @@ public class StandardNiFiWebConfigurationContext implements NiFiWebConfiguration
     public void saveActions(final NiFiWebRequestContext requestContext, final Collection<ConfigurationAction> configurationActions) {
         Objects.requireNonNull(configurationActions, "Actions cannot be null.");
 
-        // ensure the path could be 
+        // ensure the path could be
         if (requestContext.getExtensionType() == null) {
             throw new IllegalArgumentException("The UI extension type must be specified.");
         }
@@ -188,7 +187,7 @@ public class StandardNiFiWebConfigurationContext implements NiFiWebConfiguration
             throw new ResourceNotFoundException(String.format("Configuration request context config did not have a component ID."));
         }
 
-        // ensure the path could be 
+        // ensure the path could be
         if (requestContext.getExtensionType() == null) {
             throw new IllegalArgumentException("The UI extension type must be specified.");
         }
@@ -225,7 +224,7 @@ public class StandardNiFiWebConfigurationContext implements NiFiWebConfiguration
             throw new ResourceNotFoundException(String.format("Configuration request context did not have a component ID."));
         }
 
-        // ensure the path could be 
+        // ensure the path could be
         if (requestContext.getExtensionType() == null) {
             throw new IllegalArgumentException("The UI extension type must be specified.");
         }
@@ -259,17 +258,17 @@ public class StandardNiFiWebConfigurationContext implements NiFiWebConfiguration
         /**
          * Gets the component details using the specified request context.
          *
-         * @param requestContext
-         * @return
+         * @param requestContext context
+         * @return the component details using the specified request context
          */
         ComponentDetails getComponentDetails(NiFiWebRequestContext requestContext);
 
         /**
          * Sets the annotation data using the specified request context.
          *
-         * @param requestContext
-         * @param annotationData
-         * @return
+         * @param requestContext context
+         * @param annotationData data
+         * @return details
          */
         ComponentDetails setAnnotationData(NiFiWebConfigurationRequestContext requestContext, String annotationData);
     }
@@ -392,8 +391,7 @@ public class StandardNiFiWebConfigurationContext implements NiFiWebConfiguration
     }
 
     /**
-     * Interprets the request/response with the underlying ControllerService
-     * model.
+     * Interprets the request/response with the underlying ControllerService model.
      */
     private class ControllerServiceFacade implements ComponentFacade {
 
@@ -528,8 +526,7 @@ public class StandardNiFiWebConfigurationContext implements NiFiWebConfiguration
     }
 
     /**
-     * Interprets the request/response with the underlying ControllerService
-     * model.
+     * Interprets the request/response with the underlying ControllerService model.
      */
     private class ReportingTaskFacade implements ComponentFacade {
 
@@ -664,11 +661,7 @@ public class StandardNiFiWebConfigurationContext implements NiFiWebConfiguration
     }
 
     /**
-     * Gets the headers for the request to replicate to each node while
-     * clustered.
-     *
-     * @param config
-     * @return
+     * Gets the headers for the request to replicate to each node while clustered.
      */
     private Map<String, String> getHeaders(final NiFiWebRequestContext config) {
         final Map<String, String> headers = new HashMap<>();
@@ -694,10 +687,6 @@ public class StandardNiFiWebConfigurationContext implements NiFiWebConfiguration
 
     /**
      * Checks the specified response and drains the stream appropriately.
-     *
-     * @param nodeResponse
-     * @param revision
-     * @param id
      */
     private void checkResponse(final NodeResponse nodeResponse, final String id) {
         if (nodeResponse.hasThrowable()) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebContext.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebContext.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebContext.java
index 07b982f..37ad804 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebContext.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/StandardNiFiWebContext.java
@@ -63,8 +63,7 @@ import org.apache.nifi.controller.ControllerServiceLookup;
 import org.apache.nifi.web.util.ClientResponseUtils;
 
 /**
- * Implements the NiFiWebContext interface to support a context in both
- * standalone and clustered environments.
+ * Implements the NiFiWebContext interface to support a context in both standalone and clustered environments.
  */
 @Deprecated
 public class StandardNiFiWebContext implements NiFiWebContext {
@@ -285,10 +284,9 @@ public class StandardNiFiWebContext implements NiFiWebContext {
     }
 
     /**
-     * Gets the headers for the request to replicate to each node while
-     * clustered.
+     * Gets the headers for the request to replicate to each node while clustered.
      *
-     * @param config  config
+     * @param config config
      * @return headers
      */
     private Map<String, String> getHeaders(final NiFiWebContextConfig config) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ApplicationResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ApplicationResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ApplicationResource.java
index 66b237e..aa51925 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ApplicationResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ApplicationResource.java
@@ -90,8 +90,8 @@ public abstract class ApplicationResource {
     /**
      * Generate a resource uri based off of the specified parameters.
      *
-     * @param path
-     * @return
+     * @param path path
+     * @return resource uri
      */
     protected String generateResourceUri(String... path) {
         UriBuilder uriBuilder = uriInfo.getBaseUriBuilder();
@@ -150,8 +150,8 @@ public abstract class ApplicationResource {
     /**
      * Edit the response headers to indicating no caching.
      *
-     * @param response
-     * @return
+     * @param response response
+     * @return builder
      */
     protected ResponseBuilder noCache(ResponseBuilder response) {
         CacheControl cacheControl = new CacheControl();
@@ -162,12 +162,10 @@ public abstract class ApplicationResource {
     }
 
     /**
-     * If the application is operating as a node, then this method adds the
-     * cluster context information to the response using the response header
-     * 'X-CLUSTER_CONTEXT'.
+     * If the application is operating as a node, then this method adds the cluster context information to the response using the response header 'X-CLUSTER_CONTEXT'.
      *
-     * @param response
-     * @return
+     * @param response response
+     * @return builder
      */
     protected ResponseBuilder clusterContext(ResponseBuilder response) {
 
@@ -208,7 +206,7 @@ public abstract class ApplicationResource {
                 Action batchAction = new Action();
                 batchAction.setOperation(Operation.Batch);
 
-                // copy values from prototype action 
+                // copy values from prototype action
                 batchAction.setTimestamp(prototypeAction.getTimestamp());
                 batchAction.setUserDn(prototypeAction.getUserDn());
                 batchAction.setUserName(prototypeAction.getUserName());
@@ -231,8 +229,7 @@ public abstract class ApplicationResource {
     }
 
     /**
-     * @return the cluster context if found in the request header
-     * 'X-CLUSTER_CONTEXT'.
+     * @return the cluster context if found in the request header 'X-CLUSTER_CONTEXT'.
      */
     protected ClusterContext getClusterContextFromRequest() {
         String clusterContextHeaderValue = httpServletRequest.getHeader(WebClusterManager.CLUSTER_CONTEXT_HTTP_HEADER);
@@ -253,7 +250,7 @@ public abstract class ApplicationResource {
     /**
      * Generates an Ok response with no content.
      *
-     * @return
+     * @return an Ok response with no content
      */
     protected ResponseBuilder generateOkResponse() {
         return noCache(Response.ok());
@@ -274,7 +271,7 @@ public abstract class ApplicationResource {
      * Generates a 201 Created response with the specified content.
      *
      * @param uri The URI
-     * @param jsonResponse The content
+     * @param entity entity
      * @return The response to be built
      */
     protected ResponseBuilder generateCreatedResponse(URI uri, Object entity) {
@@ -283,10 +280,9 @@ public abstract class ApplicationResource {
     }
 
     /**
-     * Generates a 150 Node Continue response to be used within the cluster
-     * request handshake.
+     * Generates a 150 Node Continue response to be used within the cluster request handshake.
      *
-     * @return
+     * @return a 150 Node Continue response to be used within the cluster request handshake
      */
     protected ResponseBuilder generateContinueResponse() {
         return Response.status(WebClusterManager.NODE_CONTINUE_STATUS_CODE);
@@ -341,8 +337,8 @@ public abstract class ApplicationResource {
 
     protected Map<String, String> getHeaders(final Map<String, String> overriddenHeaders) {
 
-        final Map<String, String> result = new HashMap<String, String>();
-        final Map<String, String> overriddenHeadersIgnoreCaseMap = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
+        final Map<String, String> result = new HashMap<>();
+        final Map<String, String> overriddenHeadersIgnoreCaseMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
         overriddenHeadersIgnoreCaseMap.putAll(overriddenHeaders);
 
         final Enumeration<String> headerNames = httpServletRequest.getHeaderNames();
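
ApplicationResource's noCache(...) helper and the new case-insensitive header map both appear in the hunks above. A small sketch of the two, assuming the usual JAX-RS CacheControl flags (the exact flags NiFi sets are not visible in this hunk) and a JAX-RS implementation such as Jersey on the classpath at run time:

    import java.util.Map;
    import java.util.TreeMap;

    import javax.ws.rs.core.CacheControl;
    import javax.ws.rs.core.Response;
    import javax.ws.rs.core.Response.ResponseBuilder;

    public class ResourceHelpersSketch {

        // mirrors the idea of noCache(...): mark API responses as non-cacheable
        static ResponseBuilder noCache(ResponseBuilder response) {
            CacheControl cacheControl = new CacheControl();
            cacheControl.setPrivate(true);
            cacheControl.setNoCache(true);
            cacheControl.setNoStore(true);
            return response.cacheControl(cacheControl);
        }

        public static void main(String[] args) {
            // HTTP header names are case-insensitive, hence TreeMap(String.CASE_INSENSITIVE_ORDER)
            // when merging caller-overridden headers in getHeaders(...)
            Map<String, String> overridden = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
            overridden.put("Content-Type", "application/json");
            System.out.println(overridden.containsKey("content-type"));   // true

            Response response = noCache(Response.ok("{}")).build();
            System.out.println(response.getHeaderString("Cache-Control"));
        }
    }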

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
index 32dafb9..7c59cea 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
@@ -49,11 +49,8 @@ public class BulletinBoardResource extends ApplicationResource {
     /**
      * Retrieves all the of templates in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param after Supporting querying for bulletins after a particular
-     * bulletin id.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param after Supporting querying for bulletins after a particular bulletin id.
      * @param limit The max number of bulletins to return.
      * @param sourceName Source name filter. Supports a regular expression.
      * @param message Message filter. Supports a regular expression.

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
index a99d7df..b080fc6 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
@@ -87,7 +87,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Locates the ClusterConnection sub-resource.
      *
-     * @return
+     * @return node resource
      */
     @Path("/nodes")
     public NodeResource getNodeResource() {
@@ -261,7 +261,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Updates the processors annotation data.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param version The revision is used to verify the client is working with the latest version of the flow.
      * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param processorId The id of the processor.
@@ -314,7 +314,7 @@ public class ClusterResource extends ApplicationResource {
     /**
      * Updates the processors annotation data.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param processorId The id of the processor.
      * @param processorEntity A processorEntity.
      * @return A processorEntity.


[49/50] [abbrv] incubator-nifi git commit: Merge branch 'NIFI-292' of https://git-wip-us.apache.org/repos/asf/incubator-nifi into NIFI-292

Posted by mc...@apache.org.
Merge branch 'NIFI-292' of https://git-wip-us.apache.org/repos/asf/incubator-nifi into NIFI-292


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/5f2bd4fb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/5f2bd4fb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/5f2bd4fb

Branch: refs/heads/NIFI-292
Commit: 5f2bd4fb9702ab4668a74e63c9c55a1edab7c268
Parents: 9bdc752 ead451f
Author: Matt Gilman <ma...@gmail.com>
Authored: Tue Apr 28 09:57:08 2015 -0400
Committer: Matt Gilman <ma...@gmail.com>
Committed: Tue Apr 28 09:57:08 2015 -0400

----------------------------------------------------------------------
 .../main/java/org/apache/nifi/web/api/BulletinBoardResource.java    | 1 +
 .../src/main/java/org/apache/nifi/web/api/ClusterResource.java      | 1 +
 .../src/main/java/org/apache/nifi/web/api/ConnectionResource.java   | 1 +
 .../src/main/java/org/apache/nifi/web/api/ControllerResource.java   | 1 +
 4 files changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/5f2bd4fb/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/5f2bd4fb/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/5f2bd4fb/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/5f2bd4fb/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
----------------------------------------------------------------------


[46/50] [abbrv] incubator-nifi git commit: NIFI-292: - Annotating endpoints using swagger. - Started building the template for the REST documentation.

Posted by mc...@apache.org.
NIFI-292:
- Annotating endpoints using swagger.
- Started building the template for the REST documentation.

NIFI-292:
- Continuing to annotate endpoints using swagger.
- Continuing to build the templates for the REST documentation.


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/180534b1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/180534b1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/180534b1

Branch: refs/heads/NIFI-292
Commit: 180534b178220970db481be846b5f1b5d784e36d
Parents: 6c3256e
Author: Matt Gilman <ma...@gmail.com>
Authored: Fri Apr 24 14:45:18 2015 -0400
Committer: Matt Gilman <ma...@gmail.com>
Committed: Tue Apr 28 07:28:10 2015 -0400

----------------------------------------------------------------------
 .../nifi-web/nifi-web-api/pom.xml               |  57 ++-
 .../nifi/web/api/BulletinBoardResource.java     |   5 +
 .../apache/nifi/web/api/ClusterResource.java    | 495 +++++++++++++++++--
 .../apache/nifi/web/api/ConnectionResource.java | 167 ++++++-
 .../apache/nifi/web/api/ControllerResource.java | 439 +++++++++++++++-
 .../nifi/web/api/ControllerServiceResource.java |   2 +
 .../org/apache/nifi/web/api/FunnelResource.java |   2 +
 .../apache/nifi/web/api/HistoryResource.java    |   2 +
 .../apache/nifi/web/api/InputPortResource.java  |   2 +
 .../org/apache/nifi/web/api/LabelResource.java  |   2 +
 .../org/apache/nifi/web/api/NodeResource.java   |   2 +
 .../apache/nifi/web/api/OutputPortResource.java |   2 +
 .../nifi/web/api/ProcessGroupResource.java      |  77 +++
 .../apache/nifi/web/api/ProcessorResource.java  |  94 ++++
 .../apache/nifi/web/api/ProvenanceResource.java |   2 +
 .../web/api/RemoteProcessGroupResource.java     |   2 +
 .../nifi/web/api/ReportingTaskResource.java     |   2 +
 .../apache/nifi/web/api/SnippetResource.java    |   2 +
 .../nifi/web/api/SystemDiagnosticsResource.java |   5 +
 .../apache/nifi/web/api/TemplateResource.java   |   2 +
 .../apache/nifi/web/api/UserGroupResource.java  |   2 +
 .../org/apache/nifi/web/api/UserResource.java   |   2 +
 .../src/main/resources/templates/endpoint.hbs   |  61 +++
 .../src/main/resources/templates/example.hbs    |  16 +
 .../src/main/resources/templates/index.html.hbs | 355 +++++++++++++
 .../src/main/resources/templates/operation.hbs  | 112 +++++
 .../src/main/resources/templates/type.hbs       |  51 ++
 27 files changed, 1907 insertions(+), 55 deletions(-)
----------------------------------------------------------------------
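
The diffs below all follow the same annotation pattern: each JAX-RS resource class gains an @Api annotation, and each endpoint method gains @ApiOperation, @ApiParam, and @ApiResponses metadata that the swagger-maven-plugin reads when generating the REST documentation. A condensed sketch of that pattern is shown here; the resource name, path, and response type in this sketch are illustrative only and do not appear in the commit:

    import com.wordnik.swagger.annotations.Api;
    import com.wordnik.swagger.annotations.ApiOperation;
    import com.wordnik.swagger.annotations.ApiParam;
    import com.wordnik.swagger.annotations.ApiResponse;
    import com.wordnik.swagger.annotations.ApiResponses;
    import com.wordnik.swagger.annotations.Authorization;
    import javax.ws.rs.Consumes;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.PathParam;
    import javax.ws.rs.Produces;
    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;

    /**
     * Illustrative resource showing the swagger annotation pattern used below.
     */
    @Path("/example")
    @Api(value = "/example", description = "Illustrative resource; not part of the NiFi API")
    public class ExampleResource {

        @GET
        @Consumes(MediaType.WILDCARD)
        @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
        @Path("/{id}")
        @ApiOperation(
                value = "Gets the example entity",
                response = String.class,
                authorizations = {
                    @Authorization(value = "Read Only", type = "ROLE_MONITOR")
                }
        )
        @ApiResponses(
                value = {
                    @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid."),
                    @ApiResponse(code = 404, message = "The specified resource could not be found.")
                }
        )
        public Response getExample(
                @ApiParam(
                        value = "The example id",
                        required = true
                )
                @PathParam("id") String id) {
            return Response.ok(id).build();
        }
    }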


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/pom.xml
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/pom.xml b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/pom.xml
index 5a87ff8..3639016 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/pom.xml
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/pom.xml
@@ -89,6 +89,44 @@
                     <reuseForks>false</reuseForks>
                 </configuration>
             </plugin>
+            <plugin>
+                <groupId>com.github.kongchen</groupId>
+                <artifactId>swagger-maven-plugin</artifactId>
+                <version>3.0-M1</version>
+                <configuration>
+                    <apiSources>
+                        <apiSource>
+                            <locations>org.apache.nifi.web.api</locations>
+                            <schemes>http,https</schemes>
+                            <basePath>/nifi-api</basePath>
+                            <info>
+                                <title>NiFi Rest Api</title>
+                                <version>${project.version}</version>
+                                <!--<description>This is a sample for swagger-maven-plugin</description>-->
+                                <contact>
+                                    <email>dev@nifi.incubator.apache.org</email>
+                                    <url>https://nifi.incubator.apache.org/</url>
+                                </contact>
+                                <license>
+                                    <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
+                                    <name>Apache 2.0</name>
+                                </license>
+                            </info>
+                            <templatePath>classpath:/templates/index.html.hbs</templatePath>
+                            <outputPath>${project.build.directory}/${project.artifactId}-${project.version}/docs/rest-api/index.html</outputPath>
+                            <swaggerDirectory>${project.build.directory}/swagger-ui</swaggerDirectory>
+                        </apiSource>
+                    </apiSources>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>generate</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
         </plugins>
     </build>
     <dependencies>
@@ -119,15 +157,8 @@
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-client-dto</artifactId>
-            <classifier>sources</classifier>
-            <scope>provided</scope>
-            <optional>true</optional>
-            <version>0.1.0-incubating-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-data-provenance-utils</artifactId>
+            <scope>provided</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
@@ -198,6 +229,11 @@
             <scope>provided</scope>
         </dependency>
         <dependency>
+            <groupId>com.wordnik</groupId>
+            <artifactId>swagger-annotations</artifactId>
+            <version>1.5.3-M1</version>
+        </dependency>
+        <dependency>
             <groupId>org.quartz-scheduler</groupId>
             <artifactId>quartz</artifactId>
             <scope>provided</scope>
@@ -208,6 +244,11 @@
             <scope>provided</scope>
         </dependency>
         <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-beans</artifactId>
             <scope>provided</scope>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
index 7c59cea..d496ed7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/BulletinBoardResource.java
@@ -16,8 +16,11 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
+import javax.ws.rs.Consumes;
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
+import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
@@ -40,6 +43,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Template.
  */
+@Api(hidden = true)
 public class BulletinBoardResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(BulletinBoardResource.class);
@@ -59,6 +63,7 @@ public class BulletinBoardResource extends ApplicationResource {
      * @return A bulletinBoardEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(BulletinBoardEntity.class)

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
index b080fc6..7d76179 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ClusterResource.java
@@ -69,6 +69,12 @@ import org.apache.commons.lang3.StringUtils;
 import org.springframework.security.access.prepost.PreAuthorize;
 
 import com.sun.jersey.api.core.ResourceContext;
+import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiParam;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+import com.wordnik.swagger.annotations.Authorization;
 import org.apache.nifi.web.api.dto.status.ClusterProcessGroupStatusDTO;
 import org.apache.nifi.web.api.entity.ClusterProcessGroupStatusEntity;
 import org.codehaus.enunciate.jaxrs.TypeHint;
@@ -77,6 +83,10 @@ import org.codehaus.enunciate.jaxrs.TypeHint;
  * RESTful endpoint for managing a cluster.
  */
 @Path("/cluster")
+@Api(
+        value = "/cluster",
+        description = "Provides access to the cluster of Nodes that comprise this NiFi"
+)
 public class ClusterResource extends ApplicationResource {
 
     @Context
@@ -90,6 +100,10 @@ public class ClusterResource extends ApplicationResource {
      * @return node resource
      */
     @Path("/nodes")
+    @ApiOperation(
+            value = "Gets the node resource",
+            response = NodeResource.class
+    )
     public NodeResource getNodeResource() {
         return resourceContext.getResource(NodeResource.class);
     }
@@ -101,11 +115,34 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterStatusEntity
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterStatusEntity.class)
-    public Response getClusterStatus(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Gets the status of the cluster",
+            response = ClusterStatusEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getClusterStatus(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         if (properties.isClusterManager()) {
 
@@ -134,7 +171,8 @@ public class ClusterResource extends ApplicationResource {
      * @return An OK response with an empty entity body.
      */
     @HEAD
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     public Response getClusterHead() {
         if (properties.isClusterManager()) {
             return Response.ok().build();
@@ -150,10 +188,34 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterEntity.class)
-    public Response getCluster(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Gets the contents of the cluster",
+            notes = "Returns the contents of the cluster including all nodes and their status.",
+            response = ClusterEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getCluster(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         if (properties.isClusterManager()) {
 
@@ -182,11 +244,35 @@ public class ClusterResource extends ApplicationResource {
      * @return Nodes that match the specified criteria
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/search-results")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterSearchResultsEntity.class)
-    public Response searchCluster(@QueryParam("q") @DefaultValue(StringUtils.EMPTY) String value) {
+    @ApiOperation(
+            value = "Searches the cluster for a node with the specified address",
+            response = ClusterSearchResultsEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response searchCluster(
+            @ApiParam(
+                    value = "Node address to search for",
+                    required = true
+            )
+            @QueryParam("q") @DefaultValue(StringUtils.EMPTY) String value) {
 
         // ensure this is the cluster manager
         if (properties.isClusterManager()) {
@@ -233,11 +319,41 @@ public class ClusterResource extends ApplicationResource {
      * @return A processorEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/processors/{id}")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ProcessorEntity.class)
-    public Response getProcessor(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets the specified processor",
+            response = ProcessorEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getProcessor(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The processor id",
+                    required = true
+            )
+            @PathParam("id") String id) {
+        
         if (!properties.isClusterManager()) {
 
             final ProcessorDTO dto = serviceFacade.getProcessor(id);
@@ -270,7 +386,7 @@ public class ClusterResource extends ApplicationResource {
      */
     @PUT
     @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/processors/{id}")
     @PreAuthorize("hasAnyRole('ROLE_DFM')")
     @TypeHint(ProcessorEntity.class)
@@ -321,13 +437,39 @@ public class ClusterResource extends ApplicationResource {
      */
     @PUT
     @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/processors/{id}")
     @PreAuthorize("hasAnyRole('ROLE_DFM')")
     @TypeHint(ProcessorEntity.class)
+    @ApiOperation(
+            value = "Updates processor annotation data",
+            response = ProcessorEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response updateProcessor(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The processor id",
+                    required = true
+            )
             @PathParam("id") final String processorId,
+            @ApiParam(
+                    value = "The processor configuration details. The only configuration that will be honored at this endpoint is the processor annotation data.",
+                    required = true
+            )
             final ProcessorEntity processorEntity) {
 
         if (!properties.isClusterManager()) {
@@ -392,11 +534,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterProcessorStatusEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/processors/{id}/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterProcessorStatusEntity.class)
-    public Response getProcessorStatus(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets the processor status across the cluster",
+            response = ClusterProcessorStatusEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getProcessorStatus(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The processor id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
 
@@ -426,11 +597,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterProcessorStatusHistoryEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/processors/{id}/status/history")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterStatusHistoryEntity.class)
-    public Response getProcessorStatusHistory(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets processor status history across the cluster",
+            response = ClusterStatusHistoryEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getProcessorStatusHistory(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "The processor id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
             final ClusterStatusHistoryDTO dto = serviceFacade.getClusterProcessorStatusHistory(id);
@@ -459,11 +659,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterProcessorStatusEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/connections/{id}/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterConnectionStatusEntity.class)
-    public Response getConnectionStatus(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets connection status across the cluster",
+            response = ClusterConnectionStatusEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getConnectionStatus(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The connection id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
 
@@ -493,11 +722,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterProcessorStatusHistoryEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/connections/{id}/status/history")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterStatusHistoryEntity.class)
-    public Response getConnectionStatusHistory(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets connection status history across the cluster",
+            response = ClusterStatusHistoryEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getConnectionStatusHistory(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The connection id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
             final ClusterStatusHistoryDTO dto = serviceFacade.getClusterConnectionStatusHistory(id);
@@ -526,11 +784,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterProcessGroupStatusEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/process-groups/{id}/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
-    @TypeHint(ClusterConnectionStatusEntity.class)
-    public Response getProcessGroupStatus(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @TypeHint(ClusterProcessGroupStatusEntity.class)
+    @ApiOperation(
+            value = "Gets process group status across the cluster",
+            response = ClusterProcessGroupStatusEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getProcessGroupStatus(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The process group id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
 
@@ -560,11 +847,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterProcessGroupStatusHistoryEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/process-groups/{id}/status/history")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterStatusHistoryEntity.class)
-    public Response getProcessGroupStatusHistory(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets process group status history across the cluster",
+            response = ClusterStatusHistoryEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getProcessGroupStatusHistory(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The process group id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
             final ClusterStatusHistoryDTO dto = serviceFacade.getClusterProcessGroupStatusHistory(id);
@@ -593,11 +909,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterRemoteProcessGroupStatusEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/remote-process-groups/{id}/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterRemoteProcessGroupStatusEntity.class)
-    public Response getRemoteProcessGroupStatus(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets remote process group status across the cluster",
+            response = ClusterRemoteProcessGroupStatusEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getRemoteProcessGroupStatus(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The remote process group id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
 
@@ -627,11 +972,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterPortStatusEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/input-ports/{id}/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterPortStatusEntity.class)
-    public Response getInputPortStatus(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets input port status across the cluster",
+            response = ClusterPortStatusEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getInputPortStatus(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The input port id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
 
@@ -661,11 +1035,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterPortStatusEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/output-ports/{id}/status")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterPortStatusEntity.class)
-    public Response getOutputPortStatus(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets output port status across the cluster",
+            response = ClusterPortStatusEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getOutputPortStatus(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The output port id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
 
@@ -695,11 +1098,40 @@ public class ClusterResource extends ApplicationResource {
      * @return A clusterRemoteProcessGroupStatusHistoryEntity
      */
     @GET
-    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    @Consumes(MediaType.WILDCARD)
+    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
     @Path("/remote-process-groups/{id}/status/history")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ClusterStatusHistoryEntity.class)
-    public Response getRemoteProcessGroupStatusHistory(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets the remote process group status history across the cluster",
+            response = ClusterStatusHistoryEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "DFM", type = "ROLE_DFM"),
+                @Authorization(value = "Admin", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getRemoteProcessGroupStatusHistory(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The remote process group id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         if (properties.isClusterManager()) {
             final ClusterStatusHistoryDTO dto = serviceFacade.getClusterRemoteProcessGroupStatusHistory(id);
@@ -721,6 +1153,7 @@ public class ClusterResource extends ApplicationResource {
     }
 
     // setters
+    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/180534b1/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
index 137cc07..1e06fa4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
@@ -16,6 +16,12 @@
  */
 package org.apache.nifi.web.api;
 
+import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiParam;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+import com.wordnik.swagger.annotations.Authorization;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -73,6 +79,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 /**
  * RESTful endpoint for managing a Connection.
  */
+@Api(hidden = true)
 public class ConnectionResource extends ApplicationResource {
 
     private static final Logger logger = LoggerFactory.getLogger(ConnectionResource.class);
@@ -111,10 +118,33 @@ public class ConnectionResource extends ApplicationResource {
      * @return A connectionsEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ConnectionsEntity.class)
-    public Response getConnections(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
+    @ApiOperation(
+            value = "Gets all connections",
+            response = ConnectionsEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getConnections(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -145,11 +175,39 @@ public class ConnectionResource extends ApplicationResource {
      * @return A connectionEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{id}")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(ConnectionEntity.class)
-    public Response getConnection(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+    @ApiOperation(
+            value = "Gets a connection",
+            response = ConnectionEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getConnection(
+            @ApiParam(
+                    value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "The connection id",
+                    required = true
+            )
             @PathParam("id") String id) {
 
         // replicate if cluster manager
@@ -181,11 +239,40 @@ public class ConnectionResource extends ApplicationResource {
      * @return A statusHistoryEntity.
      */
     @GET
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{id}/status/history")
     @PreAuthorize("hasAnyRole('ROLE_MONITOR', 'ROLE_DFM', 'ROLE_ADMIN')")
     @TypeHint(StatusHistoryEntity.class)
-    public Response getConnectionStatusHistory(@QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, @PathParam("id") String id) {
+    @ApiOperation(
+            value = "Gets the status history for a connection",
+            response = StatusHistoryEntity.class,
+            authorizations = {
+                @Authorization(value = "Read Only", type = "ROLE_MONITOR"),
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM"),
+                @Authorization(value = "Administrator", type = "ROLE_ADMIN")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
+    public Response getConnectionStatusHistory(
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
+            @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId, 
+            @ApiParam(
+                    value = "The connection id",
+                    required = true
+            )
+            @PathParam("id") String id) {
 
         // replicate if cluster manager
         if (properties.isClusterManager()) {
@@ -370,8 +457,28 @@ public class ConnectionResource extends ApplicationResource {
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ConnectionEntity.class)
+    @ApiOperation(
+            value = "Creates a connection",
+            response = ConnectionEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response createConnection(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The connection configuration details",
+                    required = true
+            )
             ConnectionEntity connectionEntity) {
 
         if (connectionEntity == null || connectionEntity.getConnection() == null) {
@@ -613,9 +720,33 @@ public class ConnectionResource extends ApplicationResource {
     @Path("/{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ConnectionEntity.class)
+    @ApiOperation(
+            value = "Updates a connection",
+            response = ConnectionEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response updateConnection(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The connection id",
+                    required = true
+            )
             @PathParam("id") String id,
+            @ApiParam(
+                    value = "The connection configuration details",
+                    required = true
+            )
             ConnectionEntity connectionEntity) {
 
         if (connectionEntity == null || connectionEntity.getConnection() == null) {
@@ -680,14 +811,43 @@ public class ConnectionResource extends ApplicationResource {
      * @return An Entity containing the client id and an updated revision.
      */
     @DELETE
+    @Consumes(MediaType.WILDCARD)
     @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
     @Path("/{id}")
     @PreAuthorize("hasRole('ROLE_DFM')")
     @TypeHint(ConnectionEntity.class)
+    @ApiOperation(
+            value = "Deletes a connection",
+            response = ConnectionEntity.class,
+            authorizations = {
+                @Authorization(value = "Data Flow Manager", type = "ROLE_DFM")
+            }
+    )
+    @ApiResponses(
+            value = {
+                @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
+                @ApiResponse(code = 401, message = "Client could not be authenticated."),
+                @ApiResponse(code = 403, message = "Client is not authorized to make this request."),
+                @ApiResponse(code = 404, message = "The specified resource could not be found."),
+                @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
+            }
+    )
     public Response deleteRelationshipTarget(
             @Context HttpServletRequest httpServletRequest,
+            @ApiParam(
+                    value = "The revision is used to verify the client is working with the latest version of the flow",
+                    required = false
+            )
             @QueryParam(VERSION) LongParameter version,
+            @ApiParam(
+                    value = "If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response",
+                    required = false
+            )
             @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) ClientIdParameter clientId,
+            @ApiParam(
+                    value = "The connection id",
+                    required = true
+            )
             @PathParam("id") String id) {
 
         // replicate if cluster manager
@@ -725,6 +885,7 @@ public class ConnectionResource extends ApplicationResource {
     }
 
     // setters
+    
     public void setServiceFacade(NiFiServiceFacade serviceFacade) {
         this.serviceFacade = serviceFacade;
     }
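
The hunks above follow one pattern throughout ConnectionResource: each JAX-RS endpoint gains @Consumes(MediaType.WILDCARD) plus Swagger @ApiOperation/@ApiResponses/@ApiParam metadata so REST API documentation can be generated from the source. A minimal sketch of that same pattern on a hypothetical resource follows; the class, entity, and path names are illustrative and not part of this commit, and the com.wordnik.swagger.annotations import package is an assumption matching the swagger 1.x style annotations used in the hunks.

    import javax.ws.rs.Consumes;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.PathParam;
    import javax.ws.rs.Produces;
    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;

    // Swagger 1.x annotation package -- an assumption for this sketch only.
    import com.wordnik.swagger.annotations.ApiOperation;
    import com.wordnik.swagger.annotations.ApiParam;
    import com.wordnik.swagger.annotations.ApiResponse;
    import com.wordnik.swagger.annotations.ApiResponses;
    import com.wordnik.swagger.annotations.Authorization;

    @Path("/example")
    public class ExampleResource {

        // Trivial payload type so the sketch stands alone.
        public static class ExampleEntity {
        }

        @GET
        @Consumes(MediaType.WILDCARD)
        @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
        @Path("/{id}")
        @ApiOperation(
                value = "Gets an example resource",
                response = ExampleEntity.class,
                authorizations = {
                    @Authorization(value = "Read Only", type = "ROLE_MONITOR")
                }
        )
        @ApiResponses(
                value = {
                    @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
                    @ApiResponse(code = 404, message = "The specified resource could not be found.")
                }
        )
        public Response getExample(
                @ApiParam(value = "The example id", required = true)
                @PathParam("id") String id) {
            // Look up and return the entity; the lookup itself is omitted because this is only an annotation sketch.
            return Response.ok(new ExampleEntity()).build();
        }
    }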


[05/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXQuery.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXQuery.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXQuery.java
index 99d5858..aae4411 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXQuery.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestEvaluateXQuery.java
@@ -43,8 +43,7 @@ import org.junit.Test;
 
 public class TestEvaluateXQuery {
 
-    private static final Path XML_SNIPPET = Paths.
-            get("src/test/resources/TestXml/fruit.xml");
+    private static final Path XML_SNIPPET = Paths.get("src/test/resources/TestXml/fruit.xml");
     private static final String[] fruitNames = {"apple", "apple", "banana", "orange", "blueberry", "raspberry", "none"};
 
     private static final String[] methods = {EvaluateXQuery.OUTPUT_METHOD_XML, EvaluateXQuery.OUTPUT_METHOD_HTML, EvaluateXQuery.OUTPUT_METHOD_TEXT};
@@ -56,15 +55,11 @@ public class TestEvaluateXQuery {
         for (int i = 0; i < methods.length; i++) {
             for (int j = 0; j < booleans.length; j++) {
                 for (int k = 0; k < booleans.length; k++) {
-                    Properties props = EvaluateXQuery.
-                            getTransformerProperties(methods[i], booleans[j], booleans[k]);
+                    Properties props = EvaluateXQuery.getTransformerProperties(methods[i], booleans[j], booleans[k]);
                     assertEquals(3, props.size());
-                    assertEquals(methods[i], props.
-                            getProperty(OutputKeys.METHOD));
-                    assertEquals(booleans[j] ? "yes" : "no", props.
-                            getProperty(OutputKeys.INDENT));
-                    assertEquals(booleans[k] ? "yes" : "no", props.
-                            getProperty(OutputKeys.OMIT_XML_DECLARATION));
+                    assertEquals(methods[i], props.getProperty(OutputKeys.METHOD));
+                    assertEquals(booleans[j] ? "yes" : "no", props.getProperty(OutputKeys.INDENT));
+                    assertEquals(booleans[k] ? "yes" : "no", props.getProperty(OutputKeys.OMIT_XML_DECLARATION));
                 }
             }
         }
@@ -162,14 +157,10 @@ public class TestEvaluateXQuery {
         List<String> resultStrings = new ArrayList<>();
 
         runnerProps.clear();
-        runnerProps.
-                put(EvaluateXQuery.DESTINATION.getName(), EvaluateXQuery.DESTINATION_CONTENT);
+        runnerProps.put(EvaluateXQuery.DESTINATION.getName(), EvaluateXQuery.DESTINATION_CONTENT);
         runnerProps.put(EvaluateXQuery.XML_OUTPUT_METHOD.getName(), method);
-        runnerProps.put(EvaluateXQuery.XML_OUTPUT_INDENT.getName(), Boolean.
-                toString(indent));
-        runnerProps.
-                put(EvaluateXQuery.XML_OUTPUT_OMIT_XML_DECLARATION.getName(), Boolean.
-                        toString(omitDeclaration));
+        runnerProps.put(EvaluateXQuery.XML_OUTPUT_INDENT.getName(), Boolean.toString(indent));
+        runnerProps.put(EvaluateXQuery.XML_OUTPUT_OMIT_XML_DECLARATION.getName(), Boolean.toString(omitDeclaration));
         runnerProps.put("xquery", xQuery);
         resultFlowFiles = runXquery(xml, runnerProps);
 
@@ -185,26 +176,22 @@ public class TestEvaluateXQuery {
 
     @Test(expected = java.lang.AssertionError.class)
     public void testBadXQuery() throws Exception {
-        doXqueryTest(XML_SNIPPET, "counttttttt(*:fruitbasket/fruit)", Arrays.
-                asList("7"));
+        doXqueryTest(XML_SNIPPET, "counttttttt(*:fruitbasket/fruit)", Arrays.asList("7"));
     }
 
     @Test
     public void testXQueries() throws Exception {
 
         /* count matches */
-        doXqueryTest(XML_SNIPPET, "count(*:fruitbasket/fruit)", Arrays.
-                asList("7"));
+        doXqueryTest(XML_SNIPPET, "count(*:fruitbasket/fruit)", Arrays.asList("7"));
         doXqueryTest(XML_SNIPPET, "count(//fruit)", Arrays.asList("7"));
 
         /* Using a namespace */
-        doXqueryTest(XML_SNIPPET, "declare namespace fb = \"http://namespace/1\"; count(fb:fruitbasket/fruit)", Arrays.
-                asList("7"));
+        doXqueryTest(XML_SNIPPET, "declare namespace fb = \"http://namespace/1\"; count(fb:fruitbasket/fruit)", Arrays.asList("7"));
 
         /* determine if node exists */
         doXqueryTest(XML_SNIPPET, "boolean(//fruit[1])", Arrays.asList("true"));
-        doXqueryTest(XML_SNIPPET, "boolean(//fruit[100])", Arrays.
-                asList("false"));
+        doXqueryTest(XML_SNIPPET, "boolean(//fruit[100])", Arrays.asList("false"));
 
         /* XML first match */
         doXqueryTest(XML_SNIPPET, "//fruit[1]", Arrays.asList(
@@ -242,16 +229,13 @@ public class TestEvaluateXQuery {
                 + "</wrap>"));
 
         /* String all matches fruit names*/
-        doXqueryTest(XML_SNIPPET, "for $x in //fruit return $x/name/text()", Arrays.
-                asList(fruitNames));
+        doXqueryTest(XML_SNIPPET, "for $x in //fruit return $x/name/text()", Arrays.asList(fruitNames));
 
         /* String first match fruit name (XPath)*/
-        doXqueryTest(XML_SNIPPET, "//fruit[1]/name/text()", Arrays.
-                asList("apple"));
+        doXqueryTest(XML_SNIPPET, "//fruit[1]/name/text()", Arrays.asList("apple"));
 
         /* String first match fruit color (XPath)*/
-        doXqueryTest(XML_SNIPPET, "//fruit[1]/color/text()", Arrays.
-                asList("red"));
+        doXqueryTest(XML_SNIPPET, "//fruit[1]/color/text()", Arrays.asList("red"));
 
         /* String first match fruit name (XQuery)*/
         doXqueryTest(XML_SNIPPET, "for $x in //fruit[1] return string-join(($x/name/text() , $x/color/text()), ' - ')",
@@ -296,31 +280,25 @@ public class TestEvaluateXQuery {
 
         /* String all matches name only, comma delimited (one result)*/
         doXqueryTest(XML_SNIPPET, "string-join((for $x in //fruit return $x/name/text()), ', ')",
-                Arrays.
-                asList("apple, apple, banana, orange, blueberry, raspberry, none"));
+                Arrays.asList("apple, apple, banana, orange, blueberry, raspberry, none"));
 
         /* String all matches color and name, comma delimited (one result)*/
         doXqueryTest(XML_SNIPPET, "string-join((for $y in (for $x in //fruit return string-join(($x/color/text() , $x/name/text()), ' ')) return $y), ', ')",
-                Arrays.
-                asList("red apple, green apple, yellow banana, orange orange, blue blueberry, red raspberry, none"));
+                Arrays.asList("red apple, green apple, yellow banana, orange orange, blue blueberry, red raspberry, none"));
 
         /* String all matches color and name, comma delimited using let(one result)*/
         doXqueryTest(XML_SNIPPET, "string-join((for $y in (for $x in //fruit let $d := string-join(($x/color/text() , $x/name/text()), ' ')  return $d) return $y), ', ')",
-                Arrays.
-                asList("red apple, green apple, yellow banana, orange orange, blue blueberry, red raspberry, none"));
+                Arrays.asList("red apple, green apple, yellow banana, orange orange, blue blueberry, red raspberry, none"));
 
 
         /* Query for attribute */
-        doXqueryTest(XML_SNIPPET, "string(//fruit[1]/@taste)", Arrays.
-                asList("crisp"));
+        doXqueryTest(XML_SNIPPET, "string(//fruit[1]/@taste)", Arrays.asList("crisp"));
 
         /* Query for comment */
-        doXqueryTest(XML_SNIPPET, "//fruit/comment()", Arrays.
-                asList(" Apples are my favorite "));
+        doXqueryTest(XML_SNIPPET, "//fruit/comment()", Arrays.asList(" Apples are my favorite "));
 
         /* Query for processing instruction */
-        doXqueryTest(XML_SNIPPET, "//processing-instruction()[name()='xml-stylesheet']", Arrays.
-                asList("type=\"text/xsl\" href=\"foo.xsl\""));
+        doXqueryTest(XML_SNIPPET, "//processing-instruction()[name()='xml-stylesheet']", Arrays.asList("type=\"text/xsl\" href=\"foo.xsl\""));
 
     }
 
@@ -332,8 +310,7 @@ public class TestEvaluateXQuery {
         // test read from content, write to attribute
         {
             runnerProps.clear();
-            runnerProps.
-                    put(EvaluateXQuery.DESTINATION.getName(), EvaluateXQuery.DESTINATION_ATTRIBUTE);
+            runnerProps.put(EvaluateXQuery.DESTINATION.getName(), EvaluateXQuery.DESTINATION_ATTRIBUTE);
             runnerProps.put("xquery", xQuery);
             resultFlowFiles = runXquery(xml, runnerProps);
 
@@ -346,10 +323,8 @@ public class TestEvaluateXQuery {
                 if (expectedResults.size() > 1) {
                     key += "." + ((int) i + 1);
                 }
-                final String actual = out.getAttribute(key).
-                        replaceAll(">\\s+<", "><");
-                final String expected = expectedResults.get(i).
-                        replaceAll(">\\s+<", "><");
+                final String actual = out.getAttribute(key).replaceAll(">\\s+<", "><");
+                final String expected = expectedResults.get(i).replaceAll(">\\s+<", "><");
                 assertEquals(expected, actual);
             }
         }
@@ -357,8 +332,7 @@ public class TestEvaluateXQuery {
         // test read from content, write to content
         {
             runnerProps.clear();
-            runnerProps.
-                    put(EvaluateXQuery.DESTINATION.getName(), EvaluateXQuery.DESTINATION_CONTENT);
+            runnerProps.put(EvaluateXQuery.DESTINATION.getName(), EvaluateXQuery.DESTINATION_CONTENT);
             runnerProps.put("xquery", xQuery);
             resultFlowFiles = runXquery(xml, runnerProps);
 
@@ -368,11 +342,9 @@ public class TestEvaluateXQuery {
 
                 final MockFlowFile out = resultFlowFiles.get(i);
                 final byte[] outData = out.toByteArray();
-                final String outXml = new String(outData, "UTF-8").
-                        replaceAll(">\\s+<", "><");
+                final String outXml = new String(outData, "UTF-8").replaceAll(">\\s+<", "><");
                 final String actual = outXml;
-                final String expected = expectedResults.get(i).
-                        replaceAll(">\\s+<", "><");
+                final String expected = expectedResults.get(i).replaceAll(">\\s+<", "><");
                 assertEquals(expected, actual);
             }
         }
@@ -384,8 +356,7 @@ public class TestEvaluateXQuery {
 
     private List<MockFlowFile> runXquery(Path xml, Map<String, String> runnerProps, Map<String, String> flowFileAttributes) throws Exception {
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
 
         for (Entry<String, String> entry : runnerProps.entrySet()) {
             testRunner.setProperty(entry.getKey(), entry.getValue());
@@ -401,109 +372,81 @@ public class TestEvaluateXQuery {
 
     @Test
     public void testRootPath() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("xquery.result1", "/");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
-        final String attributeString = out.getAttribute("xquery.result1").
-                replaceAll(">\\s+<", "><");
-        final String xmlSnippetString = new String(Files.
-                readAllBytes(XML_SNIPPET), "UTF-8").replaceAll(">\\s+<", "><");
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
+        final String attributeString = out.getAttribute("xquery.result1").replaceAll(">\\s+<", "><");
+        final String xmlSnippetString = new String(Files.readAllBytes(XML_SNIPPET), "UTF-8").replaceAll(">\\s+<", "><");
 
         assertEquals(xmlSnippetString, attributeString);
     }
 
     @Test
     public void testCheckIfElementExists() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
-        testRunner.
-                setProperty("xquery.result.exist.1", "boolean(/*:fruitbasket/fruit[1])");
-        testRunner.
-                setProperty("xquery.result.exist.2", "boolean(/*:fruitbasket/fruit[100])");
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        testRunner.setProperty("xquery.result.exist.1", "boolean(/*:fruitbasket/fruit[1])");
+        testRunner.setProperty("xquery.result.exist.2", "boolean(/*:fruitbasket/fruit[100])");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
         out.assertAttributeEquals("xquery.result.exist.1", "true");
         out.assertAttributeEquals("xquery.result.exist.2", "false");
     }
 
     @Test
     public void testUnmatchedContent() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
         testRunner.setProperty("xquery.result.exist.2", "/*:fruitbasket/node2");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_NO_MATCH, 1);
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test
     public void testUnmatchedAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("xquery.result.exist.2", "/*:fruitbasket/node2");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_NO_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).get(0);
         out.assertAttributeEquals("xquery.result.exist.2", null);
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test
     public void testNoXQueryAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_NO_MATCH, 1);
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test(expected = java.lang.AssertionError.class)
     public void testNoXQueryContent() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
@@ -511,10 +454,8 @@ public class TestEvaluateXQuery {
 
     @Test
     public void testOneMatchOneUnmatchAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("some.property", "//fruit/name/text()");
         testRunner.setProperty("xquery.result.exist.2", "/*:fruitbasket/node2");
 
@@ -523,51 +464,37 @@ public class TestEvaluateXQuery {
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
 
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
 
         for (int i = 0; i < fruitNames.length; i++) {
-            final String outXml = out.
-                    getAttribute("some.property." + ((int) i + 1));
+            final String outXml = out.getAttribute("some.property." + ((int) i + 1));
             assertEquals(fruitNames[i], outXml.trim());
         }
 
         out.assertAttributeEquals("xquery.result.exist.2", null);
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test
     public void testMatchedEmptyStringAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
-        testRunner.
-                setProperty("xquery.result.exist.2", "/*:fruitbasket/*[name='none']/color/text()");
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        testRunner.setProperty("xquery.result.exist.2", "/*:fruitbasket/*[name='none']/color/text()");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_NO_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).get(0);
 
         out.assertAttributeEquals("xquery.result.exist.2", null);
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_NO_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test(expected = java.lang.AssertionError.class)
     public void testMultipleXPathForContent() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
         testRunner.setProperty("some.property.1", "/*:fruitbasket/fruit[1]");
         testRunner.setProperty("some.property.2", "/*:fruitbasket/fruit[2]");
 
@@ -577,98 +504,71 @@ public class TestEvaluateXQuery {
 
     @Test
     public void testWriteStringToAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
-        testRunner.
-                setProperty("xquery.result2", "/*:fruitbasket/fruit[1]/name/text()");
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        testRunner.setProperty("xquery.result2", "/*:fruitbasket/fruit[1]/name/text()");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
         out.assertAttributeEquals("xquery.result2", "apple");
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test
     public void testWriteStringToContent() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
-        testRunner.
-                setProperty("some.property", "/*:fruitbasket/fruit[1]/name/text()");
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
+        testRunner.setProperty("some.property", "/*:fruitbasket/fruit[1]/name/text()");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
         final byte[] outData = testRunner.getContentAsByteArray(out);
         final String outXml = new String(outData, "UTF-8");
-        assertTrue(outXml.trim().
-                equals("apple"));
+        assertTrue(outXml.trim().equals("apple"));
     }
 
     @Test
     public void testWriteXmlToAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("some.property", "/*:fruitbasket/fruit[1]/name");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
         final String outXml = out.getAttribute("some.property");
-        assertTrue(outXml.
-                contains("<name xmlns:ns=\"http://namespace/1\">apple</name>"));
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        assertTrue(outXml.contains("<name xmlns:ns=\"http://namespace/1\">apple</name>"));
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test
     public void testWriteXmlToContent() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
         testRunner.setProperty("some.property", "/*:fruitbasket/fruit[1]/name");
 
         testRunner.enqueue(XML_SNIPPET);
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
         final byte[] outData = testRunner.getContentAsByteArray(out);
         final String outXml = new String(outData, "UTF-8");
-        assertTrue(outXml.
-                contains("<name xmlns:ns=\"http://namespace/1\">apple</name>"));
+        assertTrue(outXml.contains("<name xmlns:ns=\"http://namespace/1\">apple</name>"));
     }
 
     @Test
     public void testMatchesMultipleStringContent() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
         testRunner.setProperty("some.property", "//fruit/name/text()");
 
         testRunner.enqueue(XML_SNIPPET);
@@ -676,8 +576,7 @@ public class TestEvaluateXQuery {
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 7);
 
-        final List<MockFlowFile> flowFilesForRelMatch = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH);
+        final List<MockFlowFile> flowFilesForRelMatch = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH);
         for (int i = 0; i < flowFilesForRelMatch.size(); i++) {
 
             final MockFlowFile out = flowFilesForRelMatch.get(i);
@@ -689,10 +588,8 @@ public class TestEvaluateXQuery {
 
     @Test
     public void testMatchesMultipleStringAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("some.property", "//fruit/name/text()");
 
         testRunner.enqueue(XML_SNIPPET);
@@ -700,26 +597,19 @@ public class TestEvaluateXQuery {
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
 
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
 
         for (int i = 0; i < fruitNames.length; i++) {
-            final String outXml = out.
-                    getAttribute("some.property." + ((int) i + 1));
+            final String outXml = out.getAttribute("some.property." + ((int) i + 1));
             assertEquals(fruitNames[i], outXml.trim());
         }
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 
     @Test
     public void testMatchesMultipleXmlContent() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_CONTENT);
         testRunner.setProperty("some.property", "//fruit/name");
 
         testRunner.enqueue(XML_SNIPPET);
@@ -727,8 +617,7 @@ public class TestEvaluateXQuery {
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 7);
 
-        final List<MockFlowFile> flowFilesForRelMatch = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH);
+        final List<MockFlowFile> flowFilesForRelMatch = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH);
         for (int i = 0; i < flowFilesForRelMatch.size(); i++) {
 
             final MockFlowFile out = flowFilesForRelMatch.get(i);
@@ -741,10 +630,8 @@ public class TestEvaluateXQuery {
 
     @Test
     public void testMatchesMultipleXmlAttribute() throws XPathFactoryConfigurationException, IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new EvaluateXQuery());
-        testRunner.
-                setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
+        final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());
+        testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
         testRunner.setProperty("some.property", "//fruit/name");
 
         testRunner.enqueue(XML_SNIPPET);
@@ -752,18 +639,13 @@ public class TestEvaluateXQuery {
 
         testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
 
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
 
         for (int i = 0; i < fruitNames.length; i++) {
-            final String outXml = out.
-                    getAttribute("some.property." + ((int) i + 1));
+            final String outXml = out.getAttribute("some.property." + ((int) i + 1));
             String expectedXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><name xmlns:ns=\"http://namespace/1\">" + fruitNames[i] + "</name>";
             assertEquals(expectedXml, outXml.trim());
         }
-        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).
-                get(0).
-                assertContentEquals(XML_SNIPPET);
+        testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0).assertContentEquals(XML_SNIPPET);
     }
 }
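
The TestEvaluateXQuery hunks above (and the test-class hunks that follow) only re-join method chains that had been wrapped after the dot; behavior is unchanged. For readers not familiar with the nifi-mock harness those chains exercise, the skeleton every one of these tests shares is: build a TestRunner for the processor, set properties, enqueue content, run, then assert on relationships and FlowFile contents or attributes. A commented sketch of that skeleton, assuming the same processor and test resource used above (the test class and method names here are illustrative, not part of the commit):

    import java.nio.file.Paths;

    import org.apache.nifi.processors.standard.EvaluateXQuery;
    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class EvaluateXQuerySketchTest {

        @Test
        public void runsOneFlowFileThroughTheProcessor() throws Exception {
            // 1. Create a runner wrapping the processor under test.
            final TestRunner testRunner = TestRunners.newTestRunner(new EvaluateXQuery());

            // 2. Configure it: route results to attributes and register one dynamic XQuery property.
            testRunner.setProperty(EvaluateXQuery.DESTINATION, EvaluateXQuery.DESTINATION_ATTRIBUTE);
            testRunner.setProperty("xquery.result1", "/");

            // 3. Queue input content and execute a single scheduling cycle.
            testRunner.enqueue(Paths.get("src/test/resources/TestXml/fruit.xml"));
            testRunner.run();

            // 4. Assert on routing, then inspect the output FlowFile.
            testRunner.assertAllFlowFilesTransferred(EvaluateXQuery.REL_MATCH, 1);
            final MockFlowFile out = testRunner.getFlowFilesForRelationship(EvaluateXQuery.REL_MATCH).get(0);
            out.assertAttributeExists("xquery.result1");
        }
    }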

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteProcess.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteProcess.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteProcess.java
index 0907f38..7529e6d 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteProcess.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteProcess.java
@@ -43,19 +43,16 @@ public class TestExecuteProcess {
         assertEquals(1, singleArg.size());
         assertEquals("hello", singleArg.get(0));
 
-        final List<String> twoArg = ExecuteProcess.
-                splitArgs("   hello    good-bye   ");
+        final List<String> twoArg = ExecuteProcess.splitArgs("   hello    good-bye   ");
         assertEquals(2, twoArg.size());
         assertEquals("hello", twoArg.get(0));
         assertEquals("good-bye", twoArg.get(1));
 
-        final List<String> singleQuotedArg = ExecuteProcess.
-                splitArgs("  \"hello\" ");
+        final List<String> singleQuotedArg = ExecuteProcess.splitArgs("  \"hello\" ");
         assertEquals(1, singleQuotedArg.size());
         assertEquals("hello", singleQuotedArg.get(0));
 
-        final List<String> twoQuotedArg = ExecuteProcess.
-                splitArgs("   hello \"good   bye\"");
+        final List<String> twoQuotedArg = ExecuteProcess.splitArgs("   hello \"good   bye\"");
         assertEquals(2, twoQuotedArg.size());
         assertEquals("hello", twoQuotedArg.get(0));
         assertEquals("good   bye", twoQuotedArg.get(1));
@@ -63,19 +60,16 @@ public class TestExecuteProcess {
 
     @Test
     public void testEcho() {
-        System.
-                setProperty("org.slf4j.simpleLogger.log.org.apache.nifi", "TRACE");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi", "TRACE");
 
-        final TestRunner runner = TestRunners.
-                newTestRunner(ExecuteProcess.class);
+        final TestRunner runner = TestRunners.newTestRunner(ExecuteProcess.class);
         runner.setProperty(ExecuteProcess.COMMAND, "echo");
         runner.setProperty(ExecuteProcess.COMMAND_ARGUMENTS, "test-args");
         runner.setProperty(ExecuteProcess.BATCH_DURATION, "500 millis");
 
         runner.run();
 
-        final List<MockFlowFile> flowFiles = runner.
-                getFlowFilesForRelationship(ExecuteProcess.REL_SUCCESS);
+        final List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(ExecuteProcess.REL_SUCCESS);
         for (final MockFlowFile flowFile : flowFiles) {
             System.out.println(flowFile);
             System.out.println(new String(flowFile.toByteArray()));

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteStreamCommand.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteStreamCommand.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteStreamCommand.java
index f95d644..4e4a6b0 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteStreamCommand.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteStreamCommand.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.ExecuteStreamCommand;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
@@ -48,10 +47,8 @@ public class TestExecuteStreamCommand {
     public static void init() {
         System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
         System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.ExecuteStreamCommand", "debug");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.TestExecuteStreamCommand", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.ExecuteStreamCommand", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.TestExecuteStreamCommand", "debug");
         LOGGER = LoggerFactory.getLogger(TestExecuteStreamCommand.class);
     }
 
@@ -61,46 +58,31 @@ public class TestExecuteStreamCommand {
         File dummy = new File("src/test/resources/ExecuteCommand/1000bytes.txt");
         String jarPath = exJar.getAbsolutePath();
         exJar.setExecutable(true);
-        final TestRunner controller = TestRunners.
-                newTestRunner(ExecuteStreamCommand.class);
+        final TestRunner controller = TestRunners.newTestRunner(ExecuteStreamCommand.class);
         controller.setValidateExpressionUsage(false);
         controller.enqueue(dummy.toPath());
         controller.setProperty(ExecuteStreamCommand.EXECUTION_COMMAND, "java");
-        controller.
-                setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "-jar;" + jarPath);
+        controller.setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "-jar;" + jarPath);
         controller.run(1);
-        controller.
-                assertTransferCount(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP, 1);
-        controller.
-                assertTransferCount(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP, 1);
+        controller.assertTransferCount(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP, 1);
+        controller.assertTransferCount(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP, 1);
 
-        List<MockFlowFile> flowFiles = controller.
-                getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
+        List<MockFlowFile> flowFiles = controller.getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
         MockFlowFile outputFlowFile = flowFiles.get(0);
         byte[] byteArray = outputFlowFile.toByteArray();
         String result = new String(byteArray);
-        assertTrue("Test was a success\r\n".equals(result) || "Test was a success\n".
-                equals(result));
+        assertTrue("Test was a success\r\n".equals(result) || "Test was a success\n".equals(result));
         assertEquals("0", outputFlowFile.getAttribute("execution.status"));
         assertEquals("java", outputFlowFile.getAttribute("execution.command"));
-        assertEquals("-jar;", outputFlowFile.
-                getAttribute("execution.command.args").
-                substring(0, 5));
+        assertEquals("-jar;", outputFlowFile.getAttribute("execution.command.args").substring(0, 5));
         String attribute = outputFlowFile.getAttribute("execution.command.args");
-        String expected = "src" + File.separator + "test" + File.separator + "resources" + File.separator + "ExecuteCommand" + File.separator
-                + "TestSuccess.jar";
-        assertEquals(expected, attribute.
-                substring(attribute.length() - expected.length()));
-
-        MockFlowFile originalFlowFile = controller.
-                getFlowFilesForRelationship(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP).
-                get(0);
-        assertEquals(outputFlowFile.getAttribute("execution.status"), originalFlowFile.
-                getAttribute("execution.status"));
-        assertEquals(outputFlowFile.getAttribute("execution.command"), originalFlowFile.
-                getAttribute("execution.command"));
-        assertEquals(outputFlowFile.getAttribute("execution.command.args"), originalFlowFile.
-                getAttribute("execution.command.args"));
+        String expected = "src" + File.separator + "test" + File.separator + "resources" + File.separator + "ExecuteCommand" + File.separator + "TestSuccess.jar";
+        assertEquals(expected, attribute.substring(attribute.length() - expected.length()));
+
+        MockFlowFile originalFlowFile = controller.getFlowFilesForRelationship(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP).get(0);
+        assertEquals(outputFlowFile.getAttribute("execution.status"), originalFlowFile.getAttribute("execution.status"));
+        assertEquals(outputFlowFile.getAttribute("execution.command"), originalFlowFile.getAttribute("execution.command"));
+        assertEquals(outputFlowFile.getAttribute("execution.command.args"), originalFlowFile.getAttribute("execution.command.args"));
     }
 
     @Test
@@ -109,25 +91,17 @@ public class TestExecuteStreamCommand {
         File dummy = new File("src/test/resources/ExecuteCommand/1000bytes.txt");
         String jarPath = exJar.getAbsolutePath();
         exJar.setExecutable(true);
-        final TestRunner controller = TestRunners.
-                newTestRunner(ExecuteStreamCommand.class);
+        final TestRunner controller = TestRunners.newTestRunner(ExecuteStreamCommand.class);
         controller.setValidateExpressionUsage(false);
         controller.enqueue(dummy.toPath());
         controller.setProperty(ExecuteStreamCommand.EXECUTION_COMMAND, "java");
-        controller.
-                setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "-jar;" + jarPath);
+        controller.setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "-jar;" + jarPath);
         controller.run(1);
-        controller.
-                assertTransferCount(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP, 1);
-        controller.
-                assertTransferCount(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP, 1);
-        List<MockFlowFile> flowFiles = controller.
-                getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
-        assertEquals(0, flowFiles.get(0).
-                getSize());
-        assertEquals("Error: Unable to access jarfile", flowFiles.get(0).
-                getAttribute("execution.error").
-                substring(0, 31));
+        controller.assertTransferCount(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP, 1);
+        controller.assertTransferCount(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP, 1);
+        List<MockFlowFile> flowFiles = controller.getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
+        assertEquals(0, flowFiles.get(0).getSize());
+        assertEquals("Error: Unable to access jarfile", flowFiles.get(0).getAttribute("execution.error").substring(0, 31));
     }
 
     @Test
@@ -146,28 +120,20 @@ public class TestExecuteStreamCommand {
         fos.close();
         String jarPath = exJar.getAbsolutePath();
         exJar.setExecutable(true);
-        final TestRunner controller = TestRunners.
-                newTestRunner(ExecuteStreamCommand.class);
+        final TestRunner controller = TestRunners.newTestRunner(ExecuteStreamCommand.class);
         controller.setValidateExpressionUsage(false);
         controller.enqueue(dummy100MBytes.toPath());
         controller.setProperty(ExecuteStreamCommand.EXECUTION_COMMAND, "java");
-        controller.
-                setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "-jar;" + jarPath);
+        controller.setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "-jar;" + jarPath);
         controller.run(1);
-        controller.
-                assertTransferCount(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP, 1);
-        controller.
-                assertTransferCount(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP, 1);
-        List<MockFlowFile> flowFiles = controller.
-                getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
-        byte[] byteArray = flowFiles.get(0).
-                toByteArray();
+        controller.assertTransferCount(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP, 1);
+        controller.assertTransferCount(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP, 1);
+        List<MockFlowFile> flowFiles = controller.getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
+        byte[] byteArray = flowFiles.get(0).toByteArray();
         String result = new String(byteArray);
 
-        assertTrue(result.
-                contains(File.separator + "nifi-standard-processors:ModifiedResult\r\n")
-                || result.
-                contains(File.separator + "nifi-standard-processors:ModifiedResult\n"));
+        assertTrue(result.contains(File.separator + "nifi-standard-processors:ModifiedResult\r\n")
+                || result.contains(File.separator + "nifi-standard-processors:ModifiedResult\n"));
     }
 
     @Test
@@ -176,28 +142,20 @@ public class TestExecuteStreamCommand {
         File dummy = new File("src/test/resources/ExecuteCommand/1000bytes.txt");
         String jarPath = exJar.getAbsolutePath();
         exJar.setExecutable(true);
-        final TestRunner controller = TestRunners.
-                newTestRunner(ExecuteStreamCommand.class);
+        final TestRunner controller = TestRunners.newTestRunner(ExecuteStreamCommand.class);
         controller.setValidateExpressionUsage(false);
         controller.enqueue(dummy.toPath());
         controller.setProperty(ExecuteStreamCommand.WORKING_DIR, "target");
         controller.setProperty(ExecuteStreamCommand.EXECUTION_COMMAND, "java");
-        controller.
-                setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "-jar;" + jarPath);
+        controller.setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "-jar;" + jarPath);
         controller.run(1);
-        controller.
-                assertTransferCount(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP, 1);
-        controller.
-                assertTransferCount(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP, 1);
-        List<MockFlowFile> flowFiles = controller.
-                getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
-        byte[] byteArray = flowFiles.get(0).
-                toByteArray();
+        controller.assertTransferCount(ExecuteStreamCommand.ORIGINAL_RELATIONSHIP, 1);
+        controller.assertTransferCount(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP, 1);
+        List<MockFlowFile> flowFiles = controller.getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
+        byte[] byteArray = flowFiles.get(0).toByteArray();
         String result = new String(byteArray);
-        assertTrue(result.
-                contains(File.separator + "nifi-standard-processors" + File.separator + "target:ModifiedResult\r\n")
-                || result.
-                contains(File.separator + "nifi-standard-processors" + File.separator + "target:ModifiedResult\n"));
+        assertTrue(result.contains(File.separator + "nifi-standard-processors" + File.separator + "target:ModifiedResult\r\n")
+                || result.contains(File.separator + "nifi-standard-processors" + File.separator + "target:ModifiedResult\n"));
     }
 
     // this is dependent on window with cygwin...so it's not enabled
@@ -207,8 +165,7 @@ public class TestExecuteStreamCommand {
         File testFile = new File("target/test.txt");
         testFile.delete();
         File dummy = new File("src/test/resources/ExecuteCommand/1000bytes.txt");
-        final TestRunner controller = TestRunners.
-                newTestRunner(ExecuteStreamCommand.class);
+        final TestRunner controller = TestRunners.newTestRunner(ExecuteStreamCommand.class);
         controller.setValidateExpressionUsage(false);
         controller.enqueue(dummy.toPath());
         controller.enqueue(dummy.toPath());
@@ -217,17 +174,13 @@ public class TestExecuteStreamCommand {
         controller.enqueue(dummy.toPath());
         controller.setProperty(ExecuteStreamCommand.WORKING_DIR, "target/xx1");
         controller.setThreadCount(6);
-        controller.
-                setProperty(ExecuteStreamCommand.EXECUTION_COMMAND, "c:\\cygwin\\bin\\touch");
-        controller.
-                setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "test.txt");
+        controller.setProperty(ExecuteStreamCommand.EXECUTION_COMMAND, "c:\\cygwin\\bin\\touch");
+        controller.setProperty(ExecuteStreamCommand.EXECUTION_ARGUMENTS, "test.txt");
         controller.assertValid();
         controller.run(6);
-        List<MockFlowFile> flowFiles = controller.
-                getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
+        List<MockFlowFile> flowFiles = controller.getFlowFilesForRelationship(ExecuteStreamCommand.OUTPUT_STREAM_RELATIONSHIP);
         assertEquals(5, flowFiles.size());
-        assertEquals(0, flowFiles.get(0).
-                getSize());
+        assertEquals(0, flowFiles.get(0).getSize());
 
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExtractText.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExtractText.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExtractText.java
index 045a4f9..fd47cf7 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExtractText.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExtractText.java
@@ -37,14 +37,12 @@ public class TestExtractText {
     @Test
     public void testProcessor() throws Exception {
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new ExtractText());
+        final TestRunner testRunner = TestRunners.newTestRunner(new ExtractText());
 
         testRunner.setProperty("regex.result1", "(?s)(.*)");
         testRunner.setProperty("regex.result2", "(?s).*(bar1).*");
         testRunner.setProperty("regex.result3", "(?s).*?(bar\\d).*"); // reluctant gets first
-        testRunner.
-                setProperty("regex.result4", "(?s).*?(?:bar\\d).*?(bar\\d).*?(bar3).*"); // reluctant w/ repeated pattern gets second
+        testRunner.setProperty("regex.result4", "(?s).*?(?:bar\\d).*?(bar\\d).*?(bar3).*"); // reluctant w/ repeated pattern gets second
         testRunner.setProperty("regex.result5", "(?s).*(bar\\d).*"); // greedy gets last
         testRunner.setProperty("regex.result6", "(?s)^(.*)$");
         testRunner.setProperty("regex.result7", "(?s)(XXX)");
@@ -53,9 +51,7 @@ public class TestExtractText {
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(ExtractText.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(ExtractText.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(ExtractText.REL_MATCH).get(0);
         out.assertAttributeEquals("regex.result1", SAMPLE_STRING);
         out.assertAttributeEquals("regex.result2", "bar1");
         out.assertAttributeEquals("regex.result3", "bar1");
@@ -72,8 +68,7 @@ public class TestExtractText {
     @Test
     public void testProcessorWithDotall() throws Exception {
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new ExtractText());
+        final TestRunner testRunner = TestRunners.newTestRunner(new ExtractText());
 
         testRunner.setProperty(ExtractText.DOTALL, "true");
 
@@ -89,9 +84,7 @@ public class TestExtractText {
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(ExtractText.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(ExtractText.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(ExtractText.REL_MATCH).get(0);
         out.assertAttributeEquals("regex.result1", SAMPLE_STRING);
         out.assertAttributeEquals("regex.result2", "bar1");
         out.assertAttributeEquals("regex.result3", "bar1");
@@ -105,8 +98,7 @@ public class TestExtractText {
     @Test
     public void testProcessorWithMultiline() throws Exception {
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new ExtractText());
+        final TestRunner testRunner = TestRunners.newTestRunner(new ExtractText());
 
         testRunner.setProperty(ExtractText.MULTILINE, "true");
 
@@ -124,9 +116,7 @@ public class TestExtractText {
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(ExtractText.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(ExtractText.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(ExtractText.REL_MATCH).get(0);
         out.assertAttributeEquals("regex.result1", "foo"); // matches everything on the first line
         out.assertAttributeEquals("regex.result2", "bar1");
         out.assertAttributeEquals("regex.result3", "bar1");
@@ -141,8 +131,7 @@ public class TestExtractText {
     @Test
     public void testProcessorWithMultilineAndDotall() throws Exception {
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new ExtractText());
+        final TestRunner testRunner = TestRunners.newTestRunner(new ExtractText());
 
         testRunner.setProperty(ExtractText.MULTILINE, "true");
         testRunner.setProperty(ExtractText.DOTALL, "true");
@@ -161,9 +150,7 @@ public class TestExtractText {
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(ExtractText.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(ExtractText.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(ExtractText.REL_MATCH).get(0);
 
         out.assertAttributeEquals("regex.result1", SAMPLE_STRING);
         out.assertAttributeEquals("regex.result2", "bar1");
@@ -179,8 +166,7 @@ public class TestExtractText {
     @Test
     public void testProcessorWithNoMatches() throws Exception {
 
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new ExtractText());
+        final TestRunner testRunner = TestRunners.newTestRunner(new ExtractText());
 
         testRunner.setProperty(ExtractText.MULTILINE, "true");
         testRunner.setProperty(ExtractText.DOTALL, "true");
@@ -197,9 +183,7 @@ public class TestExtractText {
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(ExtractText.REL_NO_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(ExtractText.REL_NO_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(ExtractText.REL_NO_MATCH).get(0);
 
         out.assertAttributeEquals("regex.result1", null);
         out.assertAttributeEquals("regex.result2", null);
@@ -214,8 +198,7 @@ public class TestExtractText {
 
     @Test(expected = java.lang.AssertionError.class)
     public void testNoCaptureGroups() throws UnsupportedEncodingException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new ExtractText());
+        final TestRunner testRunner = TestRunners.newTestRunner(new ExtractText());
         testRunner.setProperty("regex.result1", ".*");
         testRunner.enqueue(SAMPLE_STRING.getBytes("UTF-8"));
         testRunner.run();
@@ -223,8 +206,7 @@ public class TestExtractText {
 
     @Test
     public void testNoFlowFile() throws UnsupportedEncodingException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new ExtractText());
+        final TestRunner testRunner = TestRunners.newTestRunner(new ExtractText());
         testRunner.run();
         testRunner.assertAllFlowFilesTransferred(ExtractText.REL_MATCH, 0);
 
@@ -232,8 +214,7 @@ public class TestExtractText {
 
     @Test
     public void testMatchOutsideBuffer() throws Exception {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new ExtractText());
+        final TestRunner testRunner = TestRunners.newTestRunner(new ExtractText());
 
         testRunner.setProperty(ExtractText.MAX_BUFFER_SIZE, "3 B");//only read the first 3 chars ("foo")
 
@@ -244,9 +225,7 @@ public class TestExtractText {
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(ExtractText.REL_MATCH, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(ExtractText.REL_MATCH).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(ExtractText.REL_MATCH).get(0);
 
         out.assertAttributeEquals("regex.result1", "foo");
         out.assertAttributeEquals("regex.result2", null); // null because outsk
@@ -267,63 +246,53 @@ public class TestExtractText {
         // UNIX_LINES
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.UNIX_LINES, "true");
-        assertEquals(Pattern.UNIX_LINES, processor.getCompileFlags(testRunner.
-                getProcessContext()));
+        assertEquals(Pattern.UNIX_LINES, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // CASE_INSENSITIVE
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.CASE_INSENSITIVE, "true");
-        assertEquals(Pattern.CASE_INSENSITIVE, processor.
-                getCompileFlags(testRunner.getProcessContext()));
+        assertEquals(Pattern.CASE_INSENSITIVE, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // COMMENTS
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.COMMENTS, "true");
-        assertEquals(Pattern.COMMENTS, processor.getCompileFlags(testRunner.
-                getProcessContext()));
+        assertEquals(Pattern.COMMENTS, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // MULTILINE
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.MULTILINE, "true");
-        assertEquals(Pattern.MULTILINE, processor.getCompileFlags(testRunner.
-                getProcessContext()));
+        assertEquals(Pattern.MULTILINE, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // LITERAL
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.LITERAL, "true");
-        assertEquals(Pattern.LITERAL, processor.getCompileFlags(testRunner.
-                getProcessContext()));
+        assertEquals(Pattern.LITERAL, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // DOTALL
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.DOTALL, "true");
-        assertEquals(Pattern.DOTALL, processor.getCompileFlags(testRunner.
-                getProcessContext()));
+        assertEquals(Pattern.DOTALL, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // UNICODE_CASE
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.UNICODE_CASE, "true");
-        assertEquals(Pattern.UNICODE_CASE, processor.getCompileFlags(testRunner.
-                getProcessContext()));
+        assertEquals(Pattern.UNICODE_CASE, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // CANON_EQ
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.CANON_EQ, "true");
-        assertEquals(Pattern.CANON_EQ, processor.getCompileFlags(testRunner.
-                getProcessContext()));
+        assertEquals(Pattern.CANON_EQ, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // UNICODE_CHARACTER_CLASS
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.UNICODE_CHARACTER_CLASS, "true");
-        assertEquals(Pattern.UNICODE_CHARACTER_CLASS, processor.
-                getCompileFlags(testRunner.getProcessContext()));
+        assertEquals(Pattern.UNICODE_CHARACTER_CLASS, processor.getCompileFlags(testRunner.getProcessContext()));
 
         // DOTALL and MULTILINE
         testRunner = TestRunners.newTestRunner(processor);
         testRunner.setProperty(ExtractText.DOTALL, "true");
         testRunner.setProperty(ExtractText.MULTILINE, "true");
-        assertEquals(Pattern.DOTALL | Pattern.MULTILINE, processor.
-                getCompileFlags(testRunner.getProcessContext()));
+        assertEquals(Pattern.DOTALL | Pattern.MULTILINE, processor.getCompileFlags(testRunner.getProcessContext()));
     }
 
     @Test

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetFile.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetFile.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetFile.java
index f0526d9..018cbdc 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetFile.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetFile.java
@@ -46,8 +46,7 @@ public class TestGetFile {
     public void testFilePickedUp() throws IOException {
         final File directory = new File("target/test/data/in");
         deleteDirectory(directory);
-        assertTrue("Unable to create test data directory " + directory.
-                getAbsolutePath(), directory.exists() || directory.mkdirs());
+        assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());
 
         final File inFile = new File("src/test/resources/hello.txt");
         final Path inPath = inFile.toPath();
@@ -62,16 +61,12 @@ public class TestGetFile {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
-        final List<MockFlowFile> successFiles = runner.
-                getFlowFilesForRelationship(GetFile.REL_SUCCESS);
-        successFiles.get(0).
-                assertContentEquals("Hello, World!".getBytes("UTF-8"));
+        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
+        successFiles.get(0).assertContentEquals("Hello, World!".getBytes("UTF-8"));
 
-        final String path = successFiles.get(0).
-                getAttribute("path");
+        final String path = successFiles.get(0).getAttribute("path");
         assertEquals("/", path);
-        final String absolutePath = successFiles.get(0).
-                getAttribute(CoreAttributes.ABSOLUTE_PATH.key());
+        final String absolutePath = successFiles.get(0).getAttribute(CoreAttributes.ABSOLUTE_PATH.key());
         assertEquals(absTargetPathStr, absolutePath);
     }
 
@@ -82,8 +77,7 @@ public class TestGetFile {
                     deleteDirectory(file);
                 }
 
-                assertTrue("Could not delete " + file.getAbsolutePath(), file.
-                        delete());
+                assertTrue("Could not delete " + file.getAbsolutePath(), file.delete());
             }
         }
     }
@@ -95,8 +89,7 @@ public class TestGetFile {
 
         final File directory = new File("target/test/data/in/" + dirStruc);
         deleteDirectory(directory);
-        assertTrue("Unable to create test data directory " + directory.
-                getAbsolutePath(), directory.exists() || directory.mkdirs());
+        assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());
 
         final File inFile = new File("src/test/resources/hello.txt");
         final Path inPath = inFile.toPath();
@@ -105,15 +98,12 @@ public class TestGetFile {
         Files.copy(inPath, targetPath);
 
         final TestRunner runner = TestRunners.newTestRunner(new GetFile());
-        runner.
-                setProperty(GetFile.DIRECTORY, "target/test/data/in/${now():format('yyyy/MM/dd')}");
+        runner.setProperty(GetFile.DIRECTORY, "target/test/data/in/${now():format('yyyy/MM/dd')}");
         runner.run();
 
         runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
-        final List<MockFlowFile> successFiles = runner.
-                getFlowFilesForRelationship(GetFile.REL_SUCCESS);
-        successFiles.get(0).
-                assertContentEquals("Hello, World!".getBytes("UTF-8"));
+        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
+        successFiles.get(0).assertContentEquals("Hello, World!".getBytes("UTF-8"));
     }
 
     @Test
@@ -123,16 +113,14 @@ public class TestGetFile {
 
         final File directory = new File("target/test/data/in/" + dirStruc);
         deleteDirectory(new File("target/test/data/in"));
-        assertTrue("Unable to create test data directory " + directory.
-                getAbsolutePath(), directory.exists() || directory.mkdirs());
+        assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());
 
         final File inFile = new File("src/test/resources/hello.txt");
         final Path inPath = inFile.toPath();
         final File destFile = new File(directory, inFile.getName());
         final Path targetPath = destFile.toPath();
         final Path absTargetPath = targetPath.toAbsolutePath();
-        final String absTargetPathStr = absTargetPath.getParent().
-                toString() + "/";
+        final String absTargetPathStr = absTargetPath.getParent().toString() + "/";
         Files.copy(inPath, targetPath);
 
         final TestRunner runner = TestRunners.newTestRunner(new GetFile());
@@ -140,16 +128,12 @@ public class TestGetFile {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
-        final List<MockFlowFile> successFiles = runner.
-                getFlowFilesForRelationship(GetFile.REL_SUCCESS);
-        successFiles.get(0).
-                assertContentEquals("Hello, World!".getBytes("UTF-8"));
+        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
+        successFiles.get(0).assertContentEquals("Hello, World!".getBytes("UTF-8"));
 
-        final String path = successFiles.get(0).
-                getAttribute("path");
+        final String path = successFiles.get(0).getAttribute("path");
         assertEquals(dirStruc, path.replace('\\', '/'));
-        final String absolutePath = successFiles.get(0).
-                getAttribute(CoreAttributes.ABSOLUTE_PATH.key());
+        final String absolutePath = successFiles.get(0).getAttribute(CoreAttributes.ABSOLUTE_PATH.key());
         assertEquals(absTargetPathStr, absolutePath);
     }
 
@@ -157,8 +141,7 @@ public class TestGetFile {
     public void testAttributes() throws IOException {
         final File directory = new File("target/test/data/in/");
         deleteDirectory(directory);
-        assertTrue("Unable to create test data directory " + directory.
-                getAbsolutePath(), directory.exists() || directory.mkdirs());
+        assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());
 
         final File inFile = new File("src/test/resources/hello.txt");
         final Path inPath = inFile.toPath();
@@ -175,8 +158,7 @@ public class TestGetFile {
 
         boolean verifyPermissions = false;
         try {
-            Files.setPosixFilePermissions(targetPath, PosixFilePermissions.
-                    fromString("r--r-----"));
+            Files.setPosixFilePermissions(targetPath, PosixFilePermissions.fromString("r--r-----"));
             verifyPermissions = true;
         } catch (Exception donothing) {
         }
@@ -186,22 +168,19 @@ public class TestGetFile {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
-        final List<MockFlowFile> successFiles = runner.
-                getFlowFilesForRelationship(GetFile.REL_SUCCESS);
+        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
 
         if (verifyLastModified) {
             try {
                 final DateFormat formatter = new SimpleDateFormat(GetFile.FILE_MODIFY_DATE_ATTR_FORMAT, Locale.US);
-                final Date fileModifyTime = formatter.parse(successFiles.get(0).
-                        getAttribute("file.lastModifiedTime"));
+                final Date fileModifyTime = formatter.parse(successFiles.get(0).getAttribute("file.lastModifiedTime"));
                 assertEquals(new Date(1000000000), fileModifyTime);
             } catch (ParseException e) {
                 fail();
             }
         }
         if (verifyPermissions) {
-            successFiles.get(0).
-                    assertAttributeEquals("file.permissions", "r--r-----");
+            successFiles.get(0).assertAttributeEquals("file.permissions", "r--r-----");
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetHTTP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetHTTP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetHTTP.java
index 7a76ffd..bd975f2 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetHTTP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetHTTP.java
@@ -53,10 +53,8 @@ public class TestGetHTTP {
     public static void before() {
         System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
         System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.GetHTTP", "debug");
-        System.
-                setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.TestGetHTTP", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.GetHTTP", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.standard.TestGetHTTP", "debug");
         File confDir = new File("conf");
         if (!confDir.exists()) {
             confDir.mkdir();
@@ -96,26 +94,21 @@ public class TestGetHTTP {
             controller.setProperty(GetHTTP.CONNECTION_TIMEOUT, "5 secs");
             controller.setProperty(GetHTTP.URL, destination);
             controller.setProperty(GetHTTP.FILENAME, "testFile");
-            controller.
-                    setProperty(GetHTTP.ACCEPT_CONTENT_TYPE, "application/json");
+            controller.setProperty(GetHTTP.ACCEPT_CONTENT_TYPE, "application/json");
 
             GetHTTP getHTTPProcessor = (GetHTTP) controller.getProcessor();
             assertEquals("", getHTTPProcessor.entityTagRef.get());
-            assertEquals("Thu, 01 Jan 1970 00:00:00 GMT", getHTTPProcessor.lastModifiedRef.
-                    get());
+            assertEquals("Thu, 01 Jan 1970 00:00:00 GMT", getHTTPProcessor.lastModifiedRef.get());
             controller.run(2);
 
             // verify the lastModified and entityTag are updated
             assertFalse("".equals(getHTTPProcessor.entityTagRef.get()));
-            assertFalse("Thu, 01 Jan 1970 00:00:00 GMT".
-                    equals(getHTTPProcessor.lastModifiedRef.get()));
+            assertFalse("Thu, 01 Jan 1970 00:00:00 GMT".equals(getHTTPProcessor.lastModifiedRef.get()));
             // ran twice, but got one...which is good
             controller.assertTransferCount(GetHTTP.REL_SUCCESS, 1);
 
             // verify remote.source flowfile attribute
-            controller.getFlowFilesForRelationship(GetHTTP.REL_SUCCESS).
-                    get(0).
-                    assertAttributeEquals("gethttp.remote.source", "localhost");
+            controller.getFlowFilesForRelationship(GetHTTP.REL_SUCCESS).get(0).assertAttributeEquals("gethttp.remote.source", "localhost");
 
             controller.clearTransferState();
 
@@ -153,8 +146,7 @@ public class TestGetHTTP {
             // turn off checking for Etag, turn on checking for lastModified, but change value
             RESTServiceContentModified.IGNORE_LAST_MODIFIED = false;
             RESTServiceContentModified.IGNORE_ETAG = true;
-            RESTServiceContentModified.modificationDate = System.
-                    currentTimeMillis() / 1000 * 1000 + 5000;
+            RESTServiceContentModified.modificationDate = System.currentTimeMillis() / 1000 * 1000 + 5000;
             String lastMod = getHTTPProcessor.lastModifiedRef.get();
             controller.run(2);
             // ran twice, got 1...but should have new cached etag
@@ -196,14 +188,12 @@ public class TestGetHTTP {
             controller.setProperty(GetHTTP.CONNECTION_TIMEOUT, "5 secs");
             controller.setProperty(GetHTTP.FILENAME, "testFile");
             controller.setProperty(GetHTTP.URL, destination);
-            controller.
-                    setProperty(GetHTTP.ACCEPT_CONTENT_TYPE, "application/json");
+            controller.setProperty(GetHTTP.ACCEPT_CONTENT_TYPE, "application/json");
 
             GetHTTP getHTTPProcessor = (GetHTTP) controller.getProcessor();
 
             assertEquals("", getHTTPProcessor.entityTagRef.get());
-            assertEquals("Thu, 01 Jan 1970 00:00:00 GMT", getHTTPProcessor.lastModifiedRef.
-                    get());
+            assertEquals("Thu, 01 Jan 1970 00:00:00 GMT", getHTTPProcessor.lastModifiedRef.get());
             controller.run(2);
 
             // verify the lastModified and entityTag are updated
@@ -226,9 +216,7 @@ public class TestGetHTTP {
             assertEquals(etag, props.getProperty(GetHTTP.ETAG));
             assertEquals(lastMod, props.getProperty(GetHTTP.LAST_MODIFIED));
 
-            ProcessorInitializationContext pic = new MockProcessorInitializationContext(controller.
-                    getProcessor(),
-                    (MockProcessContext) controller.getProcessContext());
+            ProcessorInitializationContext pic = new MockProcessorInitializationContext(controller.getProcessor(), (MockProcessContext) controller.getProcessContext());
             // init causes read from file
             getHTTPProcessor.init(pic);
             assertEquals(etag, getHTTPProcessor.entityTagRef.get());
@@ -274,8 +262,7 @@ public class TestGetHTTP {
             controller.setProperty(GetHTTP.CONNECTION_TIMEOUT, "5 secs");
             controller.setProperty(GetHTTP.URL, destination);
             controller.setProperty(GetHTTP.FILENAME, "testFile");
-            controller.
-                    setProperty(GetHTTP.ACCEPT_CONTENT_TYPE, "application/json");
+            controller.setProperty(GetHTTP.ACCEPT_CONTENT_TYPE, "application/json");
 
             controller.run();
             controller.assertTransferCount(GetHTTP.REL_SUCCESS, 0);
@@ -292,15 +279,11 @@ public class TestGetHTTP {
 
     private Map<String, String> getSslProperties() {
         Map<String, String> props = new HashMap<String, String>();
-        props.
-                put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
-        props.
-                put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
+        props.put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
+        props.put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
         props.put(StandardSSLContextService.KEYSTORE_TYPE.getName(), "JKS");
-        props.
-                put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
-        props.
-                put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
+        props.put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
+        props.put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
         props.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
         return props;
     }
@@ -308,8 +291,7 @@ public class TestGetHTTP {
     private void useSSLContextService() {
         final SSLContextService service = new StandardSSLContextService();
         try {
-            controller.
-                    addControllerService("ssl-service", service, getSslProperties());
+            controller.addControllerService("ssl-service", service, getSslProperties());
             controller.enableControllerService(service);
         } catch (InitializationException ex) {
             ex.printStackTrace();
@@ -341,14 +323,11 @@ public class TestGetHTTP {
             controller.setProperty(GetHTTP.CONNECTION_TIMEOUT, "5 secs");
             controller.setProperty(GetHTTP.URL, destination);
             controller.setProperty(GetHTTP.FILENAME, "testFile");
-            controller.
-                    setProperty(GetHTTP.ACCEPT_CONTENT_TYPE, "application/json");
+            controller.setProperty(GetHTTP.ACCEPT_CONTENT_TYPE, "application/json");
 
             controller.run();
             controller.assertAllFlowFilesTransferred(GetHTTP.REL_SUCCESS, 1);
-            final MockFlowFile mff = controller.
-                    getFlowFilesForRelationship(GetHTTP.REL_SUCCESS).
-                    get(0);
+            final MockFlowFile mff = controller.getFlowFilesForRelationship(GetHTTP.REL_SUCCESS).get(0);
             mff.assertContentEquals("Hello, World!");
         } finally {
             server.shutdownServer();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetJMSQueue.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetJMSQueue.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetJMSQueue.java
index b6c79d5..9c833f5 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetJMSQueue.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetJMSQueue.java
@@ -36,16 +36,12 @@ public class TestGetJMSQueue {
     @org.junit.Ignore
     public void testSendTextToQueue() throws Exception {
         final TestRunner runner = TestRunners.newTestRunner(GetJMSQueue.class);
-        runner.
-                setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
+        runner.setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
         runner.setProperty(JmsProperties.URL, "tcp://localhost:61616");
-        runner.
-                setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
+        runner.setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
         runner.setProperty(JmsProperties.DESTINATION_NAME, "queue.testing");
-        runner.
-                setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
-        WrappedMessageProducer wrappedProducer = JmsFactory.
-                createMessageProducer(runner.getProcessContext(), true);
+        runner.setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
+        WrappedMessageProducer wrappedProducer = JmsFactory.createMessageProducer(runner.getProcessContext(), true);
         final Session jmsSession = wrappedProducer.getSession();
         final MessageProducer producer = wrappedProducer.getProducer();
 
@@ -60,16 +56,12 @@ public class TestGetJMSQueue {
     @org.junit.Ignore
     public void testSendBytesToQueue() throws Exception {
         final TestRunner runner = TestRunners.newTestRunner(GetJMSQueue.class);
-        runner.
-                setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
+        runner.setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
         runner.setProperty(JmsProperties.URL, "tcp://localhost:61616");
-        runner.
-                setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
+        runner.setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
         runner.setProperty(JmsProperties.DESTINATION_NAME, "queue.testing");
-        runner.
-                setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
-        WrappedMessageProducer wrappedProducer = JmsFactory.
-                createMessageProducer(runner.getProcessContext(), true);
+        runner.setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
+        WrappedMessageProducer wrappedProducer = JmsFactory.createMessageProducer(runner.getProcessContext(), true);
         final Session jmsSession = wrappedProducer.getSession();
         final MessageProducer producer = wrappedProducer.getProducer();
 
@@ -85,16 +77,12 @@ public class TestGetJMSQueue {
     @org.junit.Ignore
     public void testSendStreamToQueue() throws Exception {
         final TestRunner runner = TestRunners.newTestRunner(GetJMSQueue.class);
-        runner.
-                setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
+        runner.setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
         runner.setProperty(JmsProperties.URL, "tcp://localhost:61616");
-        runner.
-                setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
+        runner.setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
         runner.setProperty(JmsProperties.DESTINATION_NAME, "queue.testing");
-        runner.
-                setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
-        WrappedMessageProducer wrappedProducer = JmsFactory.
-                createMessageProducer(runner.getProcessContext(), true);
+        runner.setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
+        WrappedMessageProducer wrappedProducer = JmsFactory.createMessageProducer(runner.getProcessContext(), true);
         final Session jmsSession = wrappedProducer.getSession();
         final MessageProducer producer = wrappedProducer.getProducer();
 
@@ -110,16 +98,12 @@ public class TestGetJMSQueue {
     @org.junit.Ignore
     public void testSendMapToQueue() throws Exception {
         final TestRunner runner = TestRunners.newTestRunner(GetJMSQueue.class);
-        runner.
-                setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
+        runner.setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
         runner.setProperty(JmsProperties.URL, "tcp://localhost:61616");
-        runner.
-                setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
+        runner.setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
         runner.setProperty(JmsProperties.DESTINATION_NAME, "queue.testing");
-        runner.
-                setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
-        WrappedMessageProducer wrappedProducer = JmsFactory.
-                createMessageProducer(runner.getProcessContext(), true);
+        runner.setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
+        WrappedMessageProducer wrappedProducer = JmsFactory.createMessageProducer(runner.getProcessContext(), true);
         final Session jmsSession = wrappedProducer.getSession();
         final MessageProducer producer = wrappedProducer.getProducer();
 
@@ -136,22 +120,17 @@ public class TestGetJMSQueue {
     @org.junit.Ignore
     public void testSendObjectToQueue() throws Exception {
         final TestRunner runner = TestRunners.newTestRunner(GetJMSQueue.class);
-        runner.
-                setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
+        runner.setProperty(JmsProperties.JMS_PROVIDER, JmsProperties.ACTIVEMQ_PROVIDER);
         runner.setProperty(JmsProperties.URL, "tcp://localhost:61616");
-        runner.
-                setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
+        runner.setProperty(JmsProperties.DESTINATION_TYPE, JmsProperties.DESTINATION_TYPE_QUEUE);
         runner.setProperty(JmsProperties.DESTINATION_NAME, "queue.testing");
-        runner.
-                setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
-        WrappedMessageProducer wrappedProducer = JmsFactory.
-                createMessageProducer(runner.getProcessContext(), true);
+        runner.setProperty(JmsProperties.ACKNOWLEDGEMENT_MODE, JmsProperties.ACK_MODE_AUTO);
+        WrappedMessageProducer wrappedProducer = JmsFactory.createMessageProducer(runner.getProcessContext(), true);
         final Session jmsSession = wrappedProducer.getSession();
         final MessageProducer producer = wrappedProducer.getProducer();
 
         // Revision class is used because test just needs any Serializable class in core NiFi
-        final ObjectMessage message = jmsSession.
-                createObjectMessage(new Revision(1L, "ID"));
+        final ObjectMessage message = jmsSession.createObjectMessage(new Revision(1L, "ID"));
 
         producer.send(message);
         jmsSession.commit();


[12/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/d29a2d68
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/d29a2d68
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/d29a2d68

Branch: refs/heads/NIFI-292
Commit: d29a2d688e437bae42c12115768cdb038b7406c5
Parents: 5481889
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 11:54:36 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 11:54:36 2015 -0400

----------------------------------------------------------------------
 .../standard/AbstractJsonPathProcessor.java     |  11 +-
 .../standard/ConvertCharacterSet.java           |   2 +-
 .../nifi/processors/standard/HashAttribute.java |  34 +-
 .../nifi/processors/standard/PutEmail.java      | 302 +++++++---------
 .../apache/nifi/processors/standard/PutFTP.java |  24 +-
 .../nifi/processors/standard/PutFile.java       | 275 ++++++--------
 .../processors/standard/PutFileTransfer.java    | 140 +++-----
 .../apache/nifi/processors/standard/PutJMS.java | 161 +++------
 .../nifi/processors/standard/ReplaceText.java   | 169 ++++-----
 .../standard/ReplaceTextWithMapping.java        | 231 +++++-------
 .../processors/standard/RouteOnAttribute.java   | 136 +++----
 .../processors/standard/RouteOnContent.java     | 147 +++-----
 .../nifi/processors/standard/ScanAttribute.java | 121 +++----
 .../nifi/processors/standard/ScanContent.java   |  86 ++---
 .../processors/standard/SegmentContent.java     |  54 ++-
 .../nifi/processors/standard/SplitContent.java  | 125 +++----
 .../nifi/processors/standard/SplitJson.java     |  77 ++--
 .../nifi/processors/standard/SplitText.java     | 142 ++++----
 .../nifi/processors/standard/SplitXml.java      |  70 ++--
 .../nifi/processors/standard/TransformXml.java  |  87 ++---
 .../nifi/processors/standard/UnpackContent.java | 215 +++++------
 .../servlets/ContentAcknowledgmentServlet.java  |  55 +--
 .../standard/servlets/ListenHTTPServlet.java    | 142 +++-----
 .../nifi/processors/standard/util/Bin.java      |  22 +-
 .../processors/standard/util/BinManager.java    |  41 +--
 .../standard/util/DocumentReaderCallback.java   |   6 +-
 .../processors/standard/util/FTPTransfer.java   | 351 +++++++-----------
 .../nifi/processors/standard/util/FTPUtils.java |  95 ++---
 .../nifi/processors/standard/util/FileInfo.java |   3 +-
 .../processors/standard/util/FileTransfer.java  | 356 +++++++++----------
 .../processors/standard/util/JmsFactory.java    | 128 +++----
 .../processors/standard/util/JmsProperties.java | 256 ++++++-------
 .../util/JsonPathExpressionValidator.java       |  27 +-
 .../standard/util/NLKBufferedReader.java        |  14 +-
 .../processors/standard/util/SFTPTransfer.java  | 351 +++++++-----------
 .../processors/standard/util/SFTPUtils.java     | 167 ++++-----
 .../standard/util/UDPStreamConsumer.java        |  25 +-
 .../util/ValidatingBase32InputStream.java       |   3 +-
 .../util/ValidatingBase64InputStream.java       |   3 +-
 .../standard/util/WrappedMessageConsumer.java   |   9 +-
 .../standard/util/WrappedMessageProducer.java   |   9 +-
 .../src/test/java/TestIngestAndUpdate.java      |   3 +-
 .../processors/standard/CaptureServlet.java     |   3 +-
 .../standard/RESTServiceContentModified.java    |  15 +-
 .../standard/TestBase64EncodeContent.java       |  42 +--
 .../standard/TestCompressContent.java           |  85 ++---
 .../processors/standard/TestControlRate.java    |   3 +-
 .../standard/TestConvertCharacterSet.java       |  13 +-
 .../standard/TestDetectDuplicate.java           |  33 +-
 .../processors/standard/TestDistributeLoad.java |  19 +-
 .../processors/standard/TestEncodeContent.java  |  66 ++--
 .../processors/standard/TestEncryptContent.java |  30 +-
 .../standard/TestEvaluateJsonPath.java          | 219 ++++--------
 .../processors/standard/TestEvaluateXPath.java  | 106 ++----
 .../processors/standard/TestEvaluateXQuery.java | 312 +++++-----------
 .../processors/standard/TestExecuteProcess.java |  18 +-
 .../standard/TestExecuteStreamCommand.java      | 135 +++----
 .../processors/standard/TestExtractText.java    |  81 ++---
 .../nifi/processors/standard/TestGetFile.java   |  63 ++--
 .../nifi/processors/standard/TestGetHTTP.java   |  57 +--
 .../processors/standard/TestGetJMSQueue.java    |  63 ++--
 .../standard/TestHandleHttpRequest.java         |  19 +-
 .../standard/TestHandleHttpResponse.java        |  81 ++---
 .../processors/standard/TestHashAttribute.java  |   8 +-
 .../processors/standard/TestHashContent.java    |   5 +-
 .../standard/TestIdentifyMimeType.java          |  16 +-
 .../processors/standard/TestInvokeHTTP.java     | 137 ++-----
 .../processors/standard/TestJmsConsumer.java    |  88 ++---
 .../nifi/processors/standard/TestListenUDP.java |  39 +-
 .../processors/standard/TestMergeContent.java   | 176 +++------
 .../processors/standard/TestModifyBytes.java    |  82 ++---
 .../standard/TestMonitorActivity.java           |  84 ++---
 .../nifi/processors/standard/TestPostHTTP.java  | 102 ++----
 .../nifi/processors/standard/TestPutEmail.java  |  17 +-
 .../processors/standard/TestReplaceText.java    |  81 ++---
 .../standard/TestReplaceTextLineByLine.java     | 204 ++++-------
 .../standard/TestReplaceTextWithMapping.java    | 316 +++++-----------
 .../standard/TestRouteOnAttribute.java          |  65 ++--
 .../processors/standard/TestRouteOnContent.java |  19 +-
 .../processors/standard/TestScanAttribute.java  |  16 +-
 .../processors/standard/TestScanContent.java    |  34 +-
 .../processors/standard/TestSegmentContent.java |  14 +-
 .../nifi/processors/standard/TestServer.java    |  21 +-
 .../processors/standard/TestSplitContent.java   | 196 ++++------
 .../nifi/processors/standard/TestSplitJson.java |  91 ++---
 .../nifi/processors/standard/TestSplitText.java |  80 ++---
 .../nifi/processors/standard/TestSplitXml.java  |   4 +-
 .../processors/standard/TestTransformXml.java   |  45 +--
 .../processors/standard/TestUnpackContent.java  | 131 +++----
 .../processors/standard/TestValidateXml.java    |   4 +-
 .../standard/UserAgentTestingServlet.java       |   1 -
 91 files changed, 2933 insertions(+), 5281 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java
index 9e77dab..d03240e 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java
@@ -42,8 +42,7 @@ import java.util.Map;
 import java.util.Objects;
 
 /**
- * Provides common functionality used for processors interacting and
- * manipulating JSON data via JsonPath.
+ * Provides common functionality used for processors interacting and manipulating JSON data via JsonPath.
  *
  * @see <a href="http://json.org">http://json.org</a>
  * @see
@@ -90,9 +89,8 @@ public abstract class AbstractJsonPathProcessor extends AbstractProcessor {
     }
 
     /**
-     * Determines the context by which JsonSmartJsonProvider would treat the
-     * value. {@link java.util.Map} and {@link java.util.List} objects can be
-     * rendered as JSON elements, everything else is treated as a scalar.
+     * Determines the context by which JsonSmartJsonProvider would treat the value. {@link java.util.Map} and {@link java.util.List} objects can be rendered as JSON elements, everything else is
+     * treated as a scalar.
      *
      * @param obj item to be inspected if it is a scalar or a JSON element
      * @return false, if the object is a supported type; true otherwise
@@ -131,8 +129,7 @@ public abstract class AbstractJsonPathProcessor extends AbstractProcessor {
         abstract void cacheComputedValue(String subject, String input, JsonPath computedJsonPath);
 
         /**
-         * A hook for implementing classes to determine if a cached value is
-         * stale for a compiled JsonPath represented by either a validation
+         * A hook for implementing classes to determine if a cached value is stale for a compiled JsonPath represented by either a validation
          */
         abstract boolean isStale(String subject, String input);
     }
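
For readers skimming the reflowed Javadoc above: the scalar-versus-element rule it describes amounts to a simple type check. The standalone sketch below is illustrative only (the class and method names, JsonScalarCheck and isScalar, are invented here and are not the processor's actual API), but it captures the documented behavior, where Map and List values are rendered as JSON elements and everything else is treated as a scalar.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class JsonScalarCheck {

    // Hypothetical stand-in for the check the Javadoc above documents:
    // Map and List values are rendered as JSON elements, anything else is a scalar.
    static boolean isScalar(final Object obj) {
        return !(obj instanceof Map) && !(obj instanceof List);
    }

    public static void main(final String[] args) {
        System.out.println(isScalar("bar1"));                              // true  -> scalar
        System.out.println(isScalar(42));                                  // true  -> scalar
        System.out.println(isScalar(Arrays.asList(1, 2, 3)));              // false -> JSON element
        System.out.println(isScalar(Collections.singletonMap("k", "v")));  // false -> JSON element
    }
}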

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
index c8d22d3..ec61370 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
@@ -159,7 +159,7 @@ public class ConvertCharacterSet extends AbstractProcessor {
             });
 
             session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
-            logger.info("successfully converted characters from {} to {} for {}", 
+            logger.info("successfully converted characters from {} to {} for {}",
                     new Object[]{context.getProperty(INPUT_CHARSET).getValue(), context.getProperty(OUTPUT_CHARSET).getValue(), flowFile});
             session.transfer(flowFile, REL_SUCCESS);
         } catch (final Exception e) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
index 9187aad..314f1c7 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
@@ -50,30 +50,21 @@ import org.apache.nifi.processor.util.StandardValidators;
 
 /**
  * <p>
- * This processor identifies groups of user-specified flowfile attributes and
- * assigns a unique hash value to each group, recording this hash value in the
- * flowfile's attributes using a user-specified attribute key. The groups are
- * identified dynamically and preserved across application restarts. </p>
+ * This processor identifies groups of user-specified flowfile attributes and assigns a unique hash value to each group, recording this hash value in the flowfile's attributes using a user-specified
+ * attribute key. The groups are identified dynamically and preserved across application restarts. </p>
  *
  * <p>
- * The user must supply optional processor properties during runtime to
- * correctly configure this processor. The optional property key will be used as
- * the flowfile attribute key for attribute inspection. The value must be a
- * valid regular expression. This regular expression is evaluated against the
- * flowfile attribute values. If the regular expression contains a capturing
- * group, the value of that group will be used when comparing flow file
- * attributes. Otherwise, the original flow file attribute's value will be used
- * if and only if the value matches the given regular expression. </p>
+ * The user must supply optional processor properties during runtime to correctly configure this processor. The optional property key will be used as the flowfile attribute key for attribute
+ * inspection. The value must be a valid regular expression. This regular expression is evaluated against the flowfile attribute values. If the regular expression contains a capturing group, the value
+ * of that group will be used when comparing flow file attributes. Otherwise, the original flow file attribute's value will be used if and only if the value matches the given regular expression. </p>
  *
  * <p>
- * If a flowfile does not have an attribute entry for one or more processor
- * configured values, then the flowfile is routed to failure. </p>
+ * If a flowfile does not have an attribute entry for one or more processor configured values, then the flowfile is routed to failure. </p>
  *
  * <p>
  * An example hash value identification:
  *
- * Assume Processor Configured with Two Properties ("MDKey1" = ".*" and "MDKey2"
- * = "(.).*").
+ * Assume Processor Configured with Two Properties ("MDKey1" = ".*" and "MDKey2" = "(.).*").
  *
  * FlowFile 1 has the following attributes: MDKey1 = a MDKey2 = b
  *
@@ -89,17 +80,12 @@ import org.apache.nifi.processor.util.StandardValidators;
  *
  * FlowFile 4 has the following attribute: MDKey1 = a MDKey2 = bad
  *
- * and will be assigned to group 1 (because the value of MDKey1 has the regular
- * expression ".*" applied to it, and that evaluates to the same as MDKey1
- * attribute of the first flow file. Similarly, the capturing group for the
- * MDKey2 property indicates that only the first character of the MDKey2
- * attribute must match, and the first character of MDKey2 for Flow File 1 and
- * Flow File 4 are both 'b'.)
+ * and will be assigned to group 1 (because the value of MDKey1 has the regular expression ".*" applied to it, and that evaluates to the same as MDKey1 attribute of the first flow file. Similarly, the
+ * capturing group for the MDKey2 property indicates that only the first character of the MDKey2 attribute must match, and the first character of MDKey2 for Flow File 1 and Flow File 4 are both 'b'.)
  *
  * FlowFile 5 has the following attributes: MDKey1 = a
  *
- * and will route to failure because it does not have MDKey2 entry in its
- * attribute
+ * and will route to failure because it does not have MDKey2 entry in its attribute
  * </p>
  *
  * <p>

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
index 8cad06f..8efc563 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
@@ -124,96 +124,95 @@ public class PutEmail extends AbstractProcessor {
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .defaultValue("javax.net.ssl.SSLSocketFactory")
             .build();
-    public static final PropertyDescriptor HEADER_XMAILER = new PropertyDescriptor.Builder().
-            name("SMTP X-Mailer Header").
-            description("X-Mailer used in the header of the outgoing email").
-            required(true).
-            expressionLanguageSupported(true).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            defaultValue("NiFi").
-            build();
-    public static final PropertyDescriptor CONTENT_TYPE = new PropertyDescriptor.Builder().
-            name("Content Type").
-            description("Mime Type used to interpret the contents of the email, such as text/plain or text/html").
-            required(true).
-            expressionLanguageSupported(true).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            defaultValue("text/plain").
-            build();
-    public static final PropertyDescriptor FROM = new PropertyDescriptor.Builder().
-            name("From").
-            description("Specifies the Email address to use as the sender").
-            required(true).
-            expressionLanguageSupported(true).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
+    public static final PropertyDescriptor HEADER_XMAILER = new PropertyDescriptor.Builder()
+            .name("SMTP X-Mailer Header")
+            .description("X-Mailer used in the header of the outgoing email")
+            .required(true)
+            .expressionLanguageSupported(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .defaultValue("NiFi")
+            .build();
+    public static final PropertyDescriptor CONTENT_TYPE = new PropertyDescriptor.Builder()
+            .name("Content Type")
+            .description("Mime Type used to interpret the contents of the email, such as text/plain or text/html")
+            .required(true)
+            .expressionLanguageSupported(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .defaultValue("text/plain")
+            .build();
+    public static final PropertyDescriptor FROM = new PropertyDescriptor.Builder()
+            .name("From")
+            .description("Specifies the Email address to use as the sender")
+            .required(true)
+            .expressionLanguageSupported(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
     public static final PropertyDescriptor TO = new PropertyDescriptor.Builder()
-            .name("To").
-            description("The recipients to include in the To-Line of the email").
-            required(false).
-            expressionLanguageSupported(true).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
+            .name("To")
+            .description("The recipients to include in the To-Line of the email")
+            .required(false)
+            .expressionLanguageSupported(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
     public static final PropertyDescriptor CC = new PropertyDescriptor.Builder()
-            .name("CC").
-            description("The recipients to include in the CC-Line of the email").
-            required(false).
-            expressionLanguageSupported(true).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
-    public static final PropertyDescriptor BCC = new PropertyDescriptor.Builder().
-            name("BCC").
-            description("The recipients to include in the BCC-Line of the email").
-            required(false).
-            expressionLanguageSupported(true).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
-    public static final PropertyDescriptor SUBJECT = new PropertyDescriptor.Builder().
-            name("Subject").
-            description("The email subject").
-            required(true).
-            expressionLanguageSupported(true).
-            defaultValue("Message from NiFi").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
-    public static final PropertyDescriptor MESSAGE = new PropertyDescriptor.Builder().
-            name("Message").
-            description("The body of the email message").
-            required(true).
-            expressionLanguageSupported(true).
-            defaultValue("").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
-    public static final PropertyDescriptor ATTACH_FILE = new PropertyDescriptor.Builder().
-            name("Attach File").
-            description("Specifies whether or not the FlowFile content should be attached to the email").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("false").
-            build();
-    public static final PropertyDescriptor INCLUDE_ALL_ATTRIBUTES = new PropertyDescriptor.Builder().
-            name("Include All Attributes In Message").
-            description("Specifies whether or not all FlowFile attributes should be recorded in the body of the email message").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("false").
-            build();
-
-    public static final Relationship REL_SUCCESS = new Relationship.Builder().
-            name("success").
-            description("FlowFiles that are successfully sent will be routed to this relationship").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("FlowFiles that fail to send will be routed to this relationship").
-            build();
+            .name("CC")
+            .description("The recipients to include in the CC-Line of the email")
+            .required(false)
+            .expressionLanguageSupported(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor BCC = new PropertyDescriptor.Builder()
+            .name("BCC")
+            .description("The recipients to include in the BCC-Line of the email")
+            .required(false)
+            .expressionLanguageSupported(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor SUBJECT = new PropertyDescriptor.Builder()
+            .name("Subject")
+            .description("The email subject")
+            .required(true)
+            .expressionLanguageSupported(true)
+            .defaultValue("Message from NiFi")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor MESSAGE = new PropertyDescriptor.Builder()
+            .name("Message")
+            .description("The body of the email message")
+            .required(true)
+            .expressionLanguageSupported(true)
+            .defaultValue("")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor ATTACH_FILE = new PropertyDescriptor.Builder()
+            .name("Attach File")
+            .description("Specifies whether or not the FlowFile content should be attached to the email")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("false")
+            .build();
+    public static final PropertyDescriptor INCLUDE_ALL_ATTRIBUTES = new PropertyDescriptor.Builder()
+            .name("Include All Attributes In Message")
+            .description("Specifies whether or not all FlowFile attributes should be recorded in the body of the email message")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("false")
+            .build();
+
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("FlowFiles that are successfully sent will be routed to this relationship")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("FlowFiles that fail to send will be routed to this relationship")
+            .build();
 
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
 
     /**
-     * Mapping of the mail properties to the NiFi PropertyDescriptors that will
-     * be evaluated at runtime
+     * Mapping of the mail properties to the NiFi PropertyDescriptors that will be evaluated at runtime
      */
     private static final Map<String, PropertyDescriptor> propertyToContext = new HashMap<>();
 
@@ -221,8 +220,7 @@ public class PutEmail extends AbstractProcessor {
         propertyToContext.put("mail.smtp.host", SMTP_HOSTNAME);
         propertyToContext.put("mail.smtp.port", SMTP_PORT);
         propertyToContext.put("mail.smtp.socketFactory.port", SMTP_PORT);
-        propertyToContext.
-                put("mail.smtp.socketFactory.class", SMTP_SOCKET_FACTORY);
+        propertyToContext.put("mail.smtp.socketFactory.class", SMTP_SOCKET_FACTORY);
         propertyToContext.put("mail.smtp.auth", SMTP_AUTH);
         propertyToContext.put("mail.smtp.starttls.enable", SMTP_TLS);
         propertyToContext.put("mail.smtp.user", SMTP_USERNAME);
@@ -269,21 +267,15 @@ public class PutEmail extends AbstractProcessor {
 
     @Override
     protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> errors = new ArrayList<>(super.
-                customValidate(context));
+        final List<ValidationResult> errors = new ArrayList<>(super.customValidate(context));
 
-        final String to = context.getProperty(TO).
-                getValue();
-        final String cc = context.getProperty(CC).
-                getValue();
-        final String bcc = context.getProperty(BCC).
-                getValue();
+        final String to = context.getProperty(TO).getValue();
+        final String cc = context.getProperty(CC).getValue();
+        final String bcc = context.getProperty(BCC).getValue();
 
         if (to == null && cc == null && bcc == null) {
             errors.add(new ValidationResult.Builder().subject("To, CC, BCC").
-                    valid(false).
-                    explanation("Must specify at least one To/CC/BCC address").
-                    build());
+                    valid(false).explanation("Must specify at least one To/CC/BCC address").build());
         }
 
         return errors;
@@ -296,8 +288,7 @@ public class PutEmail extends AbstractProcessor {
             return;
         }
 
-        final Properties properties = this.
-                getMailPropertiesFromFlowFile(context, flowFile);
+        final Properties properties = this.getMailPropertiesFromFlowFile(context, flowFile);
 
         final Session mailSession = this.createMailSession(properties);
 
@@ -305,71 +296,46 @@ public class PutEmail extends AbstractProcessor {
         final ProcessorLog logger = getLogger();
 
         try {
-            message.setFrom(InternetAddress.parse(context.getProperty(FROM).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue())[0]);
-
-            final InternetAddress[] toAddresses = toInetAddresses(context.
-                    getProperty(TO).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue());
+            message.setFrom(InternetAddress.parse(context.getProperty(FROM).evaluateAttributeExpressions(flowFile).getValue())[0]);
+
+            final InternetAddress[] toAddresses = toInetAddresses(context.getProperty(TO).evaluateAttributeExpressions(flowFile).getValue());
             message.setRecipients(RecipientType.TO, toAddresses);
 
-            final InternetAddress[] ccAddresses = toInetAddresses(context.
-                    getProperty(CC).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue());
+            final InternetAddress[] ccAddresses = toInetAddresses(context.getProperty(CC).evaluateAttributeExpressions(flowFile).getValue());
             message.setRecipients(RecipientType.CC, ccAddresses);
 
-            final InternetAddress[] bccAddresses = toInetAddresses(context.
-                    getProperty(BCC).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue());
+            final InternetAddress[] bccAddresses = toInetAddresses(context.getProperty(BCC).evaluateAttributeExpressions(flowFile).getValue());
             message.setRecipients(RecipientType.BCC, bccAddresses);
 
-            message.setHeader("X-Mailer", context.getProperty(HEADER_XMAILER).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue());
-            message.setSubject(context.getProperty(SUBJECT).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue());
-            String messageText = context.getProperty(MESSAGE).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
-
-            if (context.getProperty(INCLUDE_ALL_ATTRIBUTES).
-                    asBoolean()) {
+            message.setHeader("X-Mailer", context.getProperty(HEADER_XMAILER).evaluateAttributeExpressions(flowFile).getValue());
+            message.setSubject(context.getProperty(SUBJECT).evaluateAttributeExpressions(flowFile).getValue());
+            String messageText = context.getProperty(MESSAGE).evaluateAttributeExpressions(flowFile).getValue();
+
+            if (context.getProperty(INCLUDE_ALL_ATTRIBUTES).asBoolean()) {
                 messageText = formatAttributes(flowFile, messageText);
             }
 
-            String contentType = context.getProperty(CONTENT_TYPE).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
+            String contentType = context.getProperty(CONTENT_TYPE).evaluateAttributeExpressions(flowFile).getValue();
             message.setContent(messageText, contentType);
             message.setSentDate(new Date());
 
-            if (context.getProperty(ATTACH_FILE).
-                    asBoolean()) {
+            if (context.getProperty(ATTACH_FILE).asBoolean()) {
                 final MimeBodyPart mimeText = new PreencodedMimeBodyPart("base64");
-                mimeText.
-                        setDataHandler(new DataHandler(new ByteArrayDataSource(Base64.
-                                                encodeBase64(messageText.
-                                                        getBytes("UTF-8")), "text/plain; charset=\"utf-8\"")));
+                mimeText.setDataHandler(new DataHandler(new ByteArrayDataSource(
+                        Base64.encodeBase64(messageText.getBytes("UTF-8")), "text/plain; charset=\"utf-8\"")));
                 final MimeBodyPart mimeFile = new MimeBodyPart();
                 session.read(flowFile, new InputStreamCallback() {
                     @Override
                     public void process(final InputStream stream) throws IOException {
                         try {
-                            mimeFile.
-                                    setDataHandler(new DataHandler(new ByteArrayDataSource(stream, "application/octet-stream")));
+                            mimeFile.setDataHandler(new DataHandler(new ByteArrayDataSource(stream, "application/octet-stream")));
                         } catch (final Exception e) {
                             throw new IOException(e);
                         }
                     }
                 });
 
-                mimeFile.setFileName(flowFile.
-                        getAttribute(CoreAttributes.FILENAME.key()));
+                mimeFile.setFileName(flowFile.getAttribute(CoreAttributes.FILENAME.key()));
                 MimeMultipart multipart = new MimeMultipart();
                 multipart.addBodyPart(mimeText);
                 multipart.addBodyPart(mimeFile);
@@ -378,24 +344,18 @@ public class PutEmail extends AbstractProcessor {
 
             Transport.send(message);
 
-            session.getProvenanceReporter().
-                    send(flowFile, "mailto:" + message.getAllRecipients()[0].
-                            toString());
+            session.getProvenanceReporter().send(flowFile, "mailto:" + message.getAllRecipients()[0].toString());
             session.transfer(flowFile, REL_SUCCESS);
-            logger.
-                    info("Sent email as a result of receiving {}", new Object[]{flowFile});
+            logger.info("Sent email as a result of receiving {}", new Object[]{flowFile});
         } catch (final ProcessException | MessagingException | IOException e) {
             context.yield();
-            logger.
-                    error("Failed to send email for {}: {}; routing to failure", new Object[]{flowFile, e});
+            logger.error("Failed to send email for {}: {}; routing to failure", new Object[]{flowFile, e});
             session.transfer(flowFile, REL_FAILURE);
         }
     }
 
     /**
-     * Based on the input properties, determine whether an authenticate or
-     * unauthenticated session should be used. If authenticated, creates a
-     * Password Authenticator for use in sending the email.
+     * Based on the input properties, determines whether an authenticated or unauthenticated session should be used. If authenticated, creates a Password Authenticator for use in sending the email.
      *
      * @param properties mail properties
      * @return session
@@ -407,22 +367,18 @@ public class PutEmail extends AbstractProcessor {
         /*
          * Conditionally create a password authenticator if the 'auth' parameter is set.
          */
-        final Session mailSession = auth ? Session.
-                getInstance(properties, new Authenticator() {
-                    @Override
-                    public PasswordAuthentication getPasswordAuthentication() {
-                        String username = properties.
-                        getProperty("mail.smtp.user"),
-                        password = properties.getProperty("mail.smtp.password");
-                        return new PasswordAuthentication(username, password);
-                    }
-                }) : Session.getInstance(properties); // without auth
+        final Session mailSession = auth ? Session.getInstance(properties, new Authenticator() {
+            @Override
+            public PasswordAuthentication getPasswordAuthentication() {
+                String username = properties.getProperty("mail.smtp.user"), password = properties.getProperty("mail.smtp.password");
+                return new PasswordAuthentication(username, password);
+            }
+        }) : Session.getInstance(properties); // without auth
         return mailSession;
     }
 
     /**
-     * Uses the mapping of javax.mail properties to NiFi PropertyDescriptors to
-     * build the required Properties object to be used for sending this email
+     * Uses the mapping of javax.mail properties to NiFi PropertyDescriptors to build the required Properties object to be used for sending this email
      *
      * @param context context
      * @param flowFile flowFile
@@ -438,14 +394,11 @@ public class PutEmail extends AbstractProcessor {
                 entrySet()) {
 
             // Evaluate the property descriptor against the flow file
-            String flowFileValue = context.getProperty(entry.getValue()).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
+            String flowFileValue = context.getProperty(entry.getValue()).evaluateAttributeExpressions(flowFile).getValue();
 
             String property = entry.getKey();
 
-            logger.
-                    debug("Evaluated Mail Property: {} with Value: {}", new Object[]{property, flowFileValue});
+            logger.debug("Evaluated Mail Property: {} with Value: {}", new Object[]{property, flowFileValue});
 
             // Nullable values are not allowed, so filter out
             if (null != flowFileValue) {
@@ -464,19 +417,12 @@ public class PutEmail extends AbstractProcessor {
         StringBuilder message = new StringBuilder(messagePrepend);
         message.append(BODY_SEPARATOR);
         message.append("\nStandard FlowFile Metadata:");
-        message.append(String.
-                format("\n\t%1$s = '%2$s'", "id", flowFile.getId()));
-        message.append(String.
-                format("\n\t%1$s = '%2$s'", "entryDate", new Date(flowFile.
-                                getEntryDate())));
-        message.append(String.format("\n\t%1$s = '%2$s'", "fileSize", flowFile.
-                getSize()));
+        message.append(String.format("\n\t%1$s = '%2$s'", "id", flowFile.getId()));
+        message.append(String.format("\n\t%1$s = '%2$s'", "entryDate", new Date(flowFile.getEntryDate())));
+        message.append(String.format("\n\t%1$s = '%2$s'", "fileSize", flowFile.getSize()));
         message.append("\nFlowFile Attributes:");
-        for (Entry<String, String> attribute : flowFile.getAttributes().
-                entrySet()) {
-            message.append(String.
-                    format("\n\t%1$s = '%2$s'", attribute.getKey(), attribute.
-                            getValue()));
+        for (Entry<String, String> attribute : flowFile.getAttributes().entrySet()) {
+            message.append(String.format("\n\t%1$s = '%2$s'", attribute.getKey(), attribute.getValue()));
         }
         message.append("\n");
         return message.toString();

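The createMailSession Javadoc in the hunk above describes conditionally attaching a password authenticator when the 'auth' property is set. A self-contained sketch of that pattern with the javax.mail API looks roughly like the following; the class name is hypothetical, while the property keys mirror the ones handled by propertyToContext in the diff:

import java.util.Properties;
import javax.mail.Authenticator;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;

public class MailSessionSketch {

    // Builds a javax.mail Session, attaching a password Authenticator only
    // when mail.smtp.auth is set, as the createMailSession Javadoc describes.
    public static Session createSession(final Properties props) {
        final boolean auth = Boolean.parseBoolean(props.getProperty("mail.smtp.auth"));
        if (!auth) {
            return Session.getInstance(props); // unauthenticated session
        }
        return Session.getInstance(props, new Authenticator() {
            @Override
            protected PasswordAuthentication getPasswordAuthentication() {
                return new PasswordAuthentication(
                        props.getProperty("mail.smtp.user"),
                        props.getProperty("mail.smtp.password"));
            }
        });
    }
}
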
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
index 6786bf0..051cb07 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
@@ -104,8 +104,7 @@ public class PutFTP extends PutFileTransfer<FTPTransfer> {
 
     @Override
     protected void beforePut(final FlowFile flowFile, final ProcessContext context, final FTPTransfer transfer) throws IOException {
-        transfer.
-                sendCommands(getCommands(preSendDescriptorRef.get(), context, flowFile), flowFile);
+        transfer.sendCommands(getCommands(preSendDescriptorRef.get(), context, flowFile), flowFile);
     }
 
     @Override
@@ -122,10 +121,10 @@ public class PutFTP extends PutFileTransfer<FTPTransfer> {
     @Override
     protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
         return new PropertyDescriptor.Builder()
-                .name(propertyDescriptorName).
-                addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-                dynamic(true).
-                build();
+                .name(propertyDescriptorName)
+                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                .dynamic(true)
+                .build();
     }
 
     @OnScheduled
@@ -133,8 +132,7 @@ public class PutFTP extends PutFileTransfer<FTPTransfer> {
         final Map<Integer, PropertyDescriptor> preDescriptors = new TreeMap<>();
         final Map<Integer, PropertyDescriptor> postDescriptors = new TreeMap<>();
 
-        for (final PropertyDescriptor descriptor : context.getProperties().
-                keySet()) {
+        for (final PropertyDescriptor descriptor : context.getProperties().keySet()) {
             final String name = descriptor.getName();
             final Matcher preMatcher = PRE_SEND_CMD_PATTERN.matcher(name);
             if (preMatcher.matches()) {
@@ -149,10 +147,8 @@ public class PutFTP extends PutFileTransfer<FTPTransfer> {
             }
         }
 
-        final List<PropertyDescriptor> preDescriptorList = new ArrayList<>(preDescriptors.
-                values());
-        final List<PropertyDescriptor> postDescriptorList = new ArrayList<>(postDescriptors.
-                values());
+        final List<PropertyDescriptor> preDescriptorList = new ArrayList<>(preDescriptors.values());
+        final List<PropertyDescriptor> postDescriptorList = new ArrayList<>(postDescriptors.values());
         this.preSendDescriptorRef.set(preDescriptorList);
         this.postSendDescriptorRef.set(postDescriptorList);
     }
@@ -160,9 +156,7 @@ public class PutFTP extends PutFileTransfer<FTPTransfer> {
     private List<String> getCommands(final List<PropertyDescriptor> descriptors, final ProcessContext context, final FlowFile flowFile) {
         final List<String> cmds = new ArrayList<>();
         for (final PropertyDescriptor descriptor : descriptors) {
-            cmds.add(context.getProperty(descriptor).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue());
+            cmds.add(context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue());
         }
 
         return cmds;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
index ce03491..3bbe093 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
@@ -64,76 +64,76 @@ public class PutFile extends AbstractProcessor {
     public static final String FILE_MODIFY_DATE_ATTRIBUTE = "file.lastModifiedTime";
     public static final String FILE_MODIFY_DATE_ATTR_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
 
-    public static final PropertyDescriptor DIRECTORY = new PropertyDescriptor.Builder().
-            name("Directory").
-            description("The directory to which files should be written. You may use expression language such as /aa/bb/${path}").
-            required(true).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor MAX_DESTINATION_FILES = new PropertyDescriptor.Builder().
-            name("Maximum File Count").
-            description("Specifies the maximum number of files that can exist in the output directory").
-            required(false).
-            addValidator(StandardValidators.INTEGER_VALIDATOR).
-            build();
-    public static final PropertyDescriptor CONFLICT_RESOLUTION = new PropertyDescriptor.Builder().
-            name("Conflict Resolution Strategy").
-            description("Indicates what should happen when a file with the same name already exists in the output directory").
-            required(true).
-            defaultValue(FAIL_RESOLUTION).
-            allowableValues(REPLACE_RESOLUTION, IGNORE_RESOLUTION, FAIL_RESOLUTION).
-            build();
-    public static final PropertyDescriptor CHANGE_LAST_MODIFIED_TIME = new PropertyDescriptor.Builder().
-            name("Last Modified Time").
-            description("Sets the lastModifiedTime on the output file to the value of this attribute.  Format must be yyyy-MM-dd'T'HH:mm:ssZ.  "
-                    + "You may also use expression language such as ${file.lastModifiedTime}.").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor CHANGE_PERMISSIONS = new PropertyDescriptor.Builder().
-            name("Permissions").
-            description("Sets the permissions on the output file to the value of this attribute.  Format must be either UNIX rwxrwxrwx with a - in "
+    public static final PropertyDescriptor DIRECTORY = new PropertyDescriptor.Builder()
+            .name("Directory")
+            .description("The directory to which files should be written. You may use expression language such as /aa/bb/${path}")
+            .required(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor MAX_DESTINATION_FILES = new PropertyDescriptor.Builder()
+            .name("Maximum File Count")
+            .description("Specifies the maximum number of files that can exist in the output directory")
+            .required(false)
+            .addValidator(StandardValidators.INTEGER_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor CONFLICT_RESOLUTION = new PropertyDescriptor.Builder()
+            .name("Conflict Resolution Strategy")
+            .description("Indicates what should happen when a file with the same name already exists in the output directory")
+            .required(true)
+            .defaultValue(FAIL_RESOLUTION)
+            .allowableValues(REPLACE_RESOLUTION, IGNORE_RESOLUTION, FAIL_RESOLUTION)
+            .build();
+    public static final PropertyDescriptor CHANGE_LAST_MODIFIED_TIME = new PropertyDescriptor.Builder()
+            .name("Last Modified Time")
+            .description("Sets the lastModifiedTime on the output file to the value of this attribute.  Format must be yyyy-MM-dd'T'HH:mm:ssZ.  "
+                    + "You may also use expression language such as ${file.lastModifiedTime}.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor CHANGE_PERMISSIONS = new PropertyDescriptor.Builder()
+            .name("Permissions")
+            .description("Sets the permissions on the output file to the value of this attribute.  Format must be either UNIX rwxrwxrwx with a - in "
                     + "place of denied permissions (e.g. rw-r--r--) or an octal number (e.g. 644).  You may also use expression language such as "
-                    + "${file.permissions}.").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor CHANGE_OWNER = new PropertyDescriptor.Builder().
-            name("Owner").
-            description("Sets the owner on the output file to the value of this attribute.  You may also use expression language such as "
-                    + "${file.owner}.").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor CHANGE_GROUP = new PropertyDescriptor.Builder().
-            name("Group").
-            description("Sets the group on the output file to the value of this attribute.  You may also use expression language such "
-                    + "as ${file.group}.").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor CREATE_DIRS = new PropertyDescriptor.Builder().
-            name("Create Missing Directories").
-            description("If true, then missing destination directories will be created. If false, flowfiles are penalized and sent to failure.").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("true").
-            build();
+                    + "${file.permissions}.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor CHANGE_OWNER = new PropertyDescriptor.Builder()
+            .name("Owner")
+            .description("Sets the owner on the output file to the value of this attribute.  You may also use expression language such as "
+                    + "${file.owner}.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor CHANGE_GROUP = new PropertyDescriptor.Builder()
+            .name("Group")
+            .description("Sets the group on the output file to the value of this attribute.  You may also use expression language such "
+                    + "as ${file.group}.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor CREATE_DIRS = new PropertyDescriptor.Builder()
+            .name("Create Missing Directories")
+            .description("If true, then missing destination directories will be created. If false, flowfiles are penalized and sent to failure.")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .build();
 
     public static final int MAX_FILE_LOCK_ATTEMPTS = 10;
-    public static final Relationship REL_SUCCESS = new Relationship.Builder().
-            name("success").
-            description("Files that have been successfully written to the output directory are transferred to this relationship").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("Files that could not be written to the output directory for some reason are transferred to this relationship").
-            build();
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("Files that have been successfully written to the output directory are transferred to this relationship")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("Files that could not be written to the output directory for some reason are transferred to this relationship")
+            .build();
 
     private List<PropertyDescriptor> properties;
     private Set<Relationship> relationships;
@@ -177,35 +177,25 @@ public class PutFile extends AbstractProcessor {
         }
 
         final StopWatch stopWatch = new StopWatch(true);
-        final Path configuredRootDirPath = Paths.get(context.
-                getProperty(DIRECTORY).
-                evaluateAttributeExpressions(flowFile).
-                getValue());
-        final String conflictResponse = context.getProperty(CONFLICT_RESOLUTION).
-                getValue();
-        final Integer maxDestinationFiles = context.
-                getProperty(MAX_DESTINATION_FILES).
-                asInteger();
+        final Path configuredRootDirPath = Paths.get(context.getProperty(DIRECTORY).evaluateAttributeExpressions(flowFile).getValue());
+        final String conflictResponse = context.getProperty(CONFLICT_RESOLUTION).getValue();
+        final Integer maxDestinationFiles = context.getProperty(MAX_DESTINATION_FILES).asInteger();
         final ProcessorLog logger = getLogger();
 
         Path tempDotCopyFile = null;
         try {
             final Path rootDirPath = configuredRootDirPath;
-            final Path tempCopyFile = rootDirPath.resolve("." + flowFile.
-                    getAttribute(CoreAttributes.FILENAME.key()));
-            final Path copyFile = rootDirPath.resolve(flowFile.
-                    getAttribute(CoreAttributes.FILENAME.key()));
+            final Path tempCopyFile = rootDirPath.resolve("." + flowFile.getAttribute(CoreAttributes.FILENAME.key()));
+            final Path copyFile = rootDirPath.resolve(flowFile.getAttribute(CoreAttributes.FILENAME.key()));
 
             if (!Files.exists(rootDirPath)) {
-                if (context.getProperty(CREATE_DIRS).
-                        asBoolean()) {
+                if (context.getProperty(CREATE_DIRS).asBoolean()) {
                     Files.createDirectories(rootDirPath);
                 } else {
                     flowFile = session.penalize(flowFile);
                     session.transfer(flowFile, REL_FAILURE);
-                    logger.
-                            error("Penalizing {} and routing to 'failure' because the output directory {} does not exist and Processor is "
-                                    + "configured not to create missing directories", new Object[]{flowFile, rootDirPath});
+                    logger.error("Penalizing {} and routing to 'failure' because the output directory {} does not exist and Processor is "
+                            + "configured not to create missing directories", new Object[]{flowFile, rootDirPath});
                     return;
                 }
             }
@@ -216,14 +206,12 @@ public class PutFile extends AbstractProcessor {
 
             final Path finalCopyFileDir = finalCopyFile.getParent();
             if (Files.exists(finalCopyFileDir) && maxDestinationFiles != null) { // check if too many files already
-                final int numFiles = finalCopyFileDir.toFile().
-                        list().length;
+                final int numFiles = finalCopyFileDir.toFile().list().length;
 
                 if (numFiles >= maxDestinationFiles) {
                     flowFile = session.penalize(flowFile);
-                    logger.
-                            info("Penalizing {} and routing to 'failure' because the output directory {} has {} files, which exceeds the "
-                                    + "configured maximum number of files", new Object[]{flowFile, finalCopyFileDir, numFiles});
+                    logger.info("Penalizing {} and routing to 'failure' because the output directory {} has {} files, which exceeds the "
+                            + "configured maximum number of files", new Object[]{flowFile, finalCopyFileDir, numFiles});
                     session.transfer(flowFile, REL_FAILURE);
                     return;
                 }
@@ -233,18 +221,15 @@ public class PutFile extends AbstractProcessor {
                 switch (conflictResponse) {
                     case REPLACE_RESOLUTION:
                         Files.delete(finalCopyFile);
-                        logger.
-                                info("Deleted {} as configured in order to replace with the contents of {}", new Object[]{finalCopyFile, flowFile});
+                        logger.info("Deleted {} as configured in order to replace with the contents of {}", new Object[]{finalCopyFile, flowFile});
                         break;
                     case IGNORE_RESOLUTION:
                         session.transfer(flowFile, REL_SUCCESS);
-                        logger.
-                                info("Transferring {} to success because file with same name already exists", new Object[]{flowFile});
+                        logger.info("Transferring {} to success because file with same name already exists", new Object[]{flowFile});
                         return;
                     case FAIL_RESOLUTION:
                         flowFile = session.penalize(flowFile);
-                        logger.
-                                info("Penalizing {} and routing to failure as configured because file with the same name already exists", new Object[]{flowFile});
+                        logger.info("Penalizing {} and routing to failure as configured because file with the same name already exists", new Object[]{flowFile});
                         session.transfer(flowFile, REL_FAILURE);
                         return;
                     default:
@@ -254,82 +239,53 @@ public class PutFile extends AbstractProcessor {
 
             session.exportTo(flowFile, dotCopyFile, false);
 
-            final String lastModifiedTime = context.
-                    getProperty(CHANGE_LAST_MODIFIED_TIME).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
-            if (lastModifiedTime != null && !lastModifiedTime.trim().
-                    isEmpty()) {
+            final String lastModifiedTime = context.getProperty(CHANGE_LAST_MODIFIED_TIME).evaluateAttributeExpressions(flowFile).getValue();
+            if (lastModifiedTime != null && !lastModifiedTime.trim().isEmpty()) {
                 try {
                     final DateFormat formatter = new SimpleDateFormat(FILE_MODIFY_DATE_ATTR_FORMAT, Locale.US);
-                    final Date fileModifyTime = formatter.
-                            parse(lastModifiedTime);
-                    dotCopyFile.toFile().
-                            setLastModified(fileModifyTime.getTime());
+                    final Date fileModifyTime = formatter.parse(lastModifiedTime);
+                    dotCopyFile.toFile().setLastModified(fileModifyTime.getTime());
                 } catch (Exception e) {
-                    logger.
-                            warn("Could not set file lastModifiedTime to {} because {}", new Object[]{lastModifiedTime, e});
+                    logger.warn("Could not set file lastModifiedTime to {} because {}", new Object[]{lastModifiedTime, e});
                 }
             }
 
-            final String permissions = context.getProperty(CHANGE_PERMISSIONS).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
-            if (permissions != null && !permissions.trim().
-                    isEmpty()) {
+            final String permissions = context.getProperty(CHANGE_PERMISSIONS).evaluateAttributeExpressions(flowFile).getValue();
+            if (permissions != null && !permissions.trim().isEmpty()) {
                 try {
                     String perms = stringPermissions(permissions);
                     if (!perms.isEmpty()) {
-                        Files.
-                                setPosixFilePermissions(dotCopyFile, PosixFilePermissions.
-                                        fromString(perms));
+                        Files.setPosixFilePermissions(dotCopyFile, PosixFilePermissions.fromString(perms));
                     }
                 } catch (Exception e) {
-                    logger.
-                            warn("Could not set file permissions to {} because {}", new Object[]{permissions, e});
+                    logger.warn("Could not set file permissions to {} because {}", new Object[]{permissions, e});
                 }
             }
 
-            final String owner = context.getProperty(CHANGE_OWNER).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
-            if (owner != null && !owner.trim().
-                    isEmpty()) {
+            final String owner = context.getProperty(CHANGE_OWNER).evaluateAttributeExpressions(flowFile).getValue();
+            if (owner != null && !owner.trim().isEmpty()) {
                 try {
-                    UserPrincipalLookupService lookupService = dotCopyFile.
-                            getFileSystem().
-                            getUserPrincipalLookupService();
-                    Files.setOwner(dotCopyFile, lookupService.
-                            lookupPrincipalByName(owner));
+                    UserPrincipalLookupService lookupService = dotCopyFile.getFileSystem().getUserPrincipalLookupService();
+                    Files.setOwner(dotCopyFile, lookupService.lookupPrincipalByName(owner));
                 } catch (Exception e) {
-                    logger.
-                            warn("Could not set file owner to {} because {}", new Object[]{owner, e});
+                    logger.warn("Could not set file owner to {} because {}", new Object[]{owner, e});
                 }
             }
 
-            final String group = context.getProperty(CHANGE_GROUP).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
-            if (group != null && !group.trim().
-                    isEmpty()) {
+            final String group = context.getProperty(CHANGE_GROUP).evaluateAttributeExpressions(flowFile).getValue();
+            if (group != null && !group.trim().isEmpty()) {
                 try {
-                    UserPrincipalLookupService lookupService = dotCopyFile.
-                            getFileSystem().
-                            getUserPrincipalLookupService();
-                    PosixFileAttributeView view = Files.
-                            getFileAttributeView(dotCopyFile, PosixFileAttributeView.class);
-                    view.setGroup(lookupService.
-                            lookupPrincipalByGroupName(group));
+                    UserPrincipalLookupService lookupService = dotCopyFile.getFileSystem().getUserPrincipalLookupService();
+                    PosixFileAttributeView view = Files.getFileAttributeView(dotCopyFile, PosixFileAttributeView.class);
+                    view.setGroup(lookupService.lookupPrincipalByGroupName(group));
                 } catch (Exception e) {
-                    logger.
-                            warn("Could not set file group to {} because {}", new Object[]{group, e});
+                    logger.warn("Could not set file group to {} because {}", new Object[]{group, e});
                 }
             }
 
             boolean renamed = false;
             for (int i = 0; i < 10; i++) { // try rename up to 10 times.
-                if (dotCopyFile.toFile().
-                        renameTo(finalCopyFile.toFile())) {
+                if (dotCopyFile.toFile().renameTo(finalCopyFile.toFile())) {
                     renamed = true;
                     break;// rename was successful
                 }
@@ -337,36 +293,27 @@ public class PutFile extends AbstractProcessor {
             }
 
             if (!renamed) {
-                if (Files.exists(dotCopyFile) && dotCopyFile.toFile().
-                        delete()) {
-                    logger.
-                            debug("Deleted dot copy file {}", new Object[]{dotCopyFile});
+                if (Files.exists(dotCopyFile) && dotCopyFile.toFile().delete()) {
+                    logger.debug("Deleted dot copy file {}", new Object[]{dotCopyFile});
                 }
                 throw new ProcessException("Could not rename: " + dotCopyFile);
             } else {
-                logger.
-                        info("Produced copy of {} at location {}", new Object[]{flowFile, finalCopyFile});
+                logger.info("Produced copy of {} at location {}", new Object[]{flowFile, finalCopyFile});
             }
 
-            session.getProvenanceReporter().
-                    send(flowFile, finalCopyFile.toFile().
-                            toURI().
-                            toString(), stopWatch.
-                            getElapsed(TimeUnit.MILLISECONDS));
+            session.getProvenanceReporter().send(flowFile, finalCopyFile.toFile().toURI().toString(), stopWatch.getElapsed(TimeUnit.MILLISECONDS));
             session.transfer(flowFile, REL_SUCCESS);
         } catch (final Throwable t) {
             if (tempDotCopyFile != null) {
                 try {
                     Files.deleteIfExists(tempDotCopyFile);
                 } catch (final Exception e) {
-                    logger.
-                            error("Unable to remove temporary file {} due to {}", new Object[]{tempDotCopyFile, e});
+                    logger.error("Unable to remove temporary file {} due to {}", new Object[]{tempDotCopyFile, e});
                 }
             }
 
             flowFile = session.penalize(flowFile);
-            logger.
-                    error("Penalizing {} and transferring to failure due to {}", new Object[]{flowFile, t});
+            logger.error("Penalizing {} and transferring to failure due to {}", new Object[]{flowFile, t});
             session.transfer(flowFile, REL_FAILURE);
         }
     }
@@ -375,11 +322,9 @@ public class PutFile extends AbstractProcessor {
         String permissions = "";
         final Pattern rwxPattern = Pattern.compile("^[rwx-]{9}$");
         final Pattern numPattern = Pattern.compile("\\d+");
-        if (rwxPattern.matcher(perms).
-                matches()) {
+        if (rwxPattern.matcher(perms).matches()) {
             permissions = perms;
-        } else if (numPattern.matcher(perms).
-                matches()) {
+        } else if (numPattern.matcher(perms).matches()) {
             try {
                 int number = Integer.parseInt(perms, 8);
                 StringBuilder permBuilder = new StringBuilder();

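The Permissions property in the hunk above accepts either a nine-character rwx string or an octal number, and the partially elided stringPermissions method converts the latter before the call to PosixFilePermissions.fromString. A standalone sketch of that octal-to-rwx conversion, reusing the 644 example from the property description (hypothetical class, not the processor's implementation):

import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

public class PermissionFormatSketch {

    // Converts an octal permission string such as "644" into the nine-character
    // rwx form ("rw-r--r--") accepted by PosixFilePermissions.fromString.
    public static String octalToRwx(final String octal) {
        final int number = Integer.parseInt(octal, 8);
        final StringBuilder rwx = new StringBuilder(9);
        for (int shift = 8; shift >= 0; shift--) {
            final boolean set = ((number >> shift) & 1) == 1;
            final char[] symbols = {'r', 'w', 'x'};
            rwx.append(set ? symbols[(8 - shift) % 3] : '-');
        }
        return rwx.toString();
    }

    public static void main(final String[] args) {
        final String rwx = octalToRwx("644");
        System.out.println(rwx); // rw-r--r--
        final Set<PosixFilePermission> perms = PosixFilePermissions.fromString(rwx);
        System.out.println(perms);
    }
}
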
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
index 893aee9..b60d07f 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
@@ -48,18 +48,18 @@ import java.util.concurrent.TimeUnit;
  */
 public abstract class PutFileTransfer<T extends FileTransfer> extends AbstractProcessor {
 
-    public static final Relationship REL_SUCCESS = new Relationship.Builder().
-            name("success").
-            description("FlowFiles that are successfully sent will be routed to success").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("FlowFiles that failed to send to the remote system; failure is usually looped back to this processor").
-            build();
-    public static final Relationship REL_REJECT = new Relationship.Builder().
-            name("reject").
-            description("FlowFiles that were rejected by the destination system").
-            build();
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("FlowFiles that are successfully sent will be routed to success")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("FlowFiles that failed to send to the remote system; failure is usually looped back to this processor")
+            .build();
+    public static final Relationship REL_REJECT = new Relationship.Builder()
+            .name("reject")
+            .description("FlowFiles that were rejected by the destination system")
+            .build();
 
     private final Set<Relationship> relationships;
 
@@ -95,43 +95,27 @@ public abstract class PutFileTransfer<T extends FileTransfer> extends AbstractPr
         }
 
         final ProcessorLog logger = getLogger();
-        final String hostname = context.getProperty(FileTransfer.HOSTNAME).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
+        final String hostname = context.getProperty(FileTransfer.HOSTNAME).evaluateAttributeExpressions(flowFile).getValue();
 
-        final int maxNumberOfFiles = context.
-                getProperty(FileTransfer.BATCH_SIZE).
-                asInteger();
+        final int maxNumberOfFiles = context.getProperty(FileTransfer.BATCH_SIZE).asInteger();
         int fileCount = 0;
         try (final T transfer = getFileTransfer(context)) {
             do {
-                final String rootPath = context.
-                        getProperty(FileTransfer.REMOTE_PATH).
-                        evaluateAttributeExpressions(flowFile).
-                        getValue();
+                final String rootPath = context.getProperty(FileTransfer.REMOTE_PATH).evaluateAttributeExpressions(flowFile).getValue();
                 final String workingDirPath;
                 if (rootPath == null) {
                     workingDirPath = null;
                 } else {
                     File workingDirectory = new File(rootPath);
-                    if (!workingDirectory.getPath().
-                            startsWith("/") && !workingDirectory.getPath().
-                            startsWith("\\")) {
-                        workingDirectory = new File(transfer.
-                                getHomeDirectory(flowFile), workingDirectory.
-                                getPath());
+                    if (!workingDirectory.getPath().startsWith("/") && !workingDirectory.getPath().startsWith("\\")) {
+                        workingDirectory = new File(transfer.getHomeDirectory(flowFile), workingDirectory.getPath());
                     }
-                    workingDirPath = workingDirectory.getPath().
-                            replace("\\", "/");
+                    workingDirPath = workingDirectory.getPath().replace("\\", "/");
                 }
 
-                final boolean rejectZeroByteFiles = context.
-                        getProperty(FileTransfer.REJECT_ZERO_BYTE).
-                        asBoolean();
-                final ConflictResult conflictResult = identifyAndResolveConflictFile(context.
-                        getProperty(FileTransfer.CONFLICT_RESOLUTION).
-                        getValue(),
-                        transfer, workingDirPath, flowFile, rejectZeroByteFiles, logger);
+                final boolean rejectZeroByteFiles = context.getProperty(FileTransfer.REJECT_ZERO_BYTE).asBoolean();
+                final ConflictResult conflictResult
+                        = identifyAndResolveConflictFile(context.getProperty(FileTransfer.CONFLICT_RESOLUTION).getValue(), transfer, workingDirPath, flowFile, rejectZeroByteFiles, logger);
 
                 if (conflictResult.isTransfer()) {
                     final StopWatch stopWatch = new StopWatch();
@@ -144,37 +128,28 @@ public abstract class PutFileTransfer<T extends FileTransfer> extends AbstractPr
                         @Override
                         public void process(final InputStream in) throws IOException {
                             try (final InputStream bufferedIn = new BufferedInputStream(in)) {
-                                if (workingDirPath != null && context.
-                                        getProperty(SFTPTransfer.CREATE_DIRECTORY).
-                                        asBoolean()) {
-                                    transfer.
-                                            ensureDirectoryExists(flowFileToTransfer, new File(workingDirPath));
+                                if (workingDirPath != null && context.getProperty(SFTPTransfer.CREATE_DIRECTORY).asBoolean()) {
+                                    transfer.ensureDirectoryExists(flowFileToTransfer, new File(workingDirPath));
                                 }
 
-                                fullPathRef.set(transfer.
-                                        put(flowFileToTransfer, workingDirPath, conflictResult.
-                                                getFileName(), bufferedIn));
+                                fullPathRef.set(transfer.put(flowFileToTransfer, workingDirPath, conflictResult.getFileName(), bufferedIn));
                             }
                         }
                     });
                     afterPut(flowFile, context, transfer);
 
                     stopWatch.stop();
-                    final String dataRate = stopWatch.
-                            calculateDataRate(flowFile.getSize());
-                    final long millis = stopWatch.
-                            getDuration(TimeUnit.MILLISECONDS);
-                    logger.
-                            info("Successfully transferred {} to {} on remote host {} in {} milliseconds at a rate of {}",
-                                    new Object[]{flowFile, fullPathRef.get(), hostname, millis, dataRate});
+                    final String dataRate = stopWatch.calculateDataRate(flowFile.getSize());
+                    final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);
+                    logger.info("Successfully transferred {} to {} on remote host {} in {} milliseconds at a rate of {}",
+                            new Object[]{flowFile, fullPathRef.get(), hostname, millis, dataRate});
 
                     String fullPathWithSlash = fullPathRef.get();
                     if (!fullPathWithSlash.startsWith("/")) {
                         fullPathWithSlash = "/" + fullPathWithSlash;
                     }
                     final String destinationUri = transfer.getProtocolName() + "://" + hostname + fullPathWithSlash;
-                    session.getProvenanceReporter().
-                            send(flowFile, destinationUri, millis);
+                    session.getProvenanceReporter().send(flowFile, destinationUri, millis);
                 }
 
                 if (conflictResult.isPenalize()) {
@@ -183,28 +158,23 @@ public abstract class PutFileTransfer<T extends FileTransfer> extends AbstractPr
 
                 session.transfer(flowFile, conflictResult.getRelationship());
                 session.commit();
-            } while (isScheduled() && (getRelationships().
-                    size() == context.getAvailableRelationships().
-                    size()) && (++fileCount < maxNumberOfFiles) && ((flowFile = session.
-                    get()) != null));
+            } while (isScheduled()
+                    && (getRelationships().size() == context.getAvailableRelationships().size())
+                    && (++fileCount < maxNumberOfFiles)
+                    && ((flowFile = session.get()) != null));
         } catch (final IOException e) {
             context.yield();
-            logger.
-                    error("Unable to transfer {} to remote host {} due to {}", new Object[]{flowFile, hostname, e});
+            logger.error("Unable to transfer {} to remote host {} due to {}", new Object[]{flowFile, hostname, e});
             flowFile = session.penalize(flowFile);
             session.transfer(flowFile, REL_FAILURE);
         } catch (final FlowFileAccessException e) {
             context.yield();
-            logger.
-                    error("Unable to transfer {} to remote host {} due to {}", new Object[]{flowFile, hostname, e.
-                        getCause()});
+            logger.error("Unable to transfer {} to remote host {} due to {}", new Object[]{flowFile, hostname, e.getCause()});
             flowFile = session.penalize(flowFile);
             session.transfer(flowFile, REL_FAILURE);
         } catch (final ProcessException e) {
             context.yield();
-            logger.
-                    error("Unable to transfer {} to remote host {} due to {}: {}; routing to failure", new Object[]{flowFile, hostname, e, e.
-                        getCause()});
+            logger.error("Unable to transfer {} to remote host {} due to {}: {}; routing to failure", new Object[]{flowFile, hostname, e, e.getCause()});
             flowFile = session.penalize(flowFile);
             session.transfer(flowFile, REL_FAILURE);
         }
@@ -222,62 +192,53 @@ public abstract class PutFileTransfer<T extends FileTransfer> extends AbstractPr
         if (rejectZeroByteFiles) {
             final long sizeInBytes = flowFile.getSize();
             if (sizeInBytes == 0) {
-                logger.
-                        warn("Rejecting {} because it is zero bytes", new Object[]{flowFile});
+                logger.warn("Rejecting {} because it is zero bytes", new Object[]{flowFile});
                 return new ConflictResult(REL_REJECT, false, fileName, true);
             }
         }
 
         //Second, check if the user doesn't care about detecting naming conflicts ahead of time
-        if (conflictResolutionType.
-                equalsIgnoreCase(FileTransfer.CONFLICT_RESOLUTION_NONE)) {
+        if (conflictResolutionType.equalsIgnoreCase(FileTransfer.CONFLICT_RESOLUTION_NONE)) {
             return new ConflictResult(destinationRelationship, transferFile, fileName, penalizeFile);
         }
 
-        final FileInfo remoteFileInfo = transfer.
-                getRemoteFileInfo(flowFile, path, fileName);
+        final FileInfo remoteFileInfo = transfer.getRemoteFileInfo(flowFile, path, fileName);
         if (remoteFileInfo == null) {
             return new ConflictResult(destinationRelationship, transferFile, fileName, penalizeFile);
         }
 
         if (remoteFileInfo.isDirectory()) {
-            logger.
-                    info("Resolving conflict by rejecting {} due to conflicting filename with a directory or file already on remote server", new Object[]{flowFile});
+            logger.info("Resolving conflict by rejecting {} due to conflicting filename with a directory or file already on remote server", new Object[]{flowFile});
             return new ConflictResult(REL_REJECT, false, fileName, false);
         }
 
-        logger.
-                info("Discovered a filename conflict on the remote server for {} so handling using configured Conflict Resolution of {}",
-                        new Object[]{flowFile, conflictResolutionType});
+        logger.info("Discovered a filename conflict on the remote server for {} so handling using configured Conflict Resolution of {}",
+                new Object[]{flowFile, conflictResolutionType});
 
         switch (conflictResolutionType.toUpperCase()) {
             case FileTransfer.CONFLICT_RESOLUTION_REJECT:
                 destinationRelationship = REL_REJECT;
                 transferFile = false;
                 penalizeFile = false;
-                logger.
-                        info("Resolving conflict by rejecting {} due to conflicting filename with a directory or file already on remote server", new Object[]{flowFile});
+                logger.info("Resolving conflict by rejecting {} due to conflicting filename with a directory or file already on remote server", new Object[]{flowFile});
                 break;
             case FileTransfer.CONFLICT_RESOLUTION_REPLACE:
                 transfer.deleteFile(path, fileName);
                 destinationRelationship = REL_SUCCESS;
                 transferFile = true;
                 penalizeFile = false;
-                logger.
-                        info("Resolving filename conflict for {} with remote server by deleting remote file and replacing with flow file", new Object[]{flowFile});
+                logger.info("Resolving filename conflict for {} with remote server by deleting remote file and replacing with flow file", new Object[]{flowFile});
                 break;
             case FileTransfer.CONFLICT_RESOLUTION_RENAME:
                 boolean uniqueNameGenerated = false;
                 for (int i = 1; i < 100 && !uniqueNameGenerated; i++) {
                     String possibleFileName = i + "." + fileName;
 
-                    final FileInfo renamedFileInfo = transfer.
-                            getRemoteFileInfo(flowFile, path, possibleFileName);
+                    final FileInfo renamedFileInfo = transfer.getRemoteFileInfo(flowFile, path, possibleFileName);
                     uniqueNameGenerated = (renamedFileInfo == null);
                     if (uniqueNameGenerated) {
                         fileName = possibleFileName;
-                        logger.
-                                info("Attempting to resolve filename conflict for {} on the remote server by using a newly generated filename of: {}", new Object[]{flowFile, fileName});
+                        logger.info("Attempting to resolve filename conflict for {} on the remote server by using a newly generated filename of: {}", new Object[]{flowFile, fileName});
                         destinationRelationship = REL_SUCCESS;
                         transferFile = true;
                         penalizeFile = false;
@@ -288,23 +249,20 @@ public abstract class PutFileTransfer<T extends FileTransfer> extends AbstractPr
                     destinationRelationship = REL_REJECT;
                     transferFile = false;
                     penalizeFile = false;
-                    logger.
-                            info("Could not determine a unique name after 99 attempts.  Switching resolution mode to REJECT for " + flowFile);
+                    logger.info("Could not determine a unique name after 99 attempts.  Switching resolution mode to REJECT for " + flowFile);
                 }
                 break;
             case FileTransfer.CONFLICT_RESOLUTION_IGNORE:
                 destinationRelationship = REL_SUCCESS;
                 transferFile = false;
                 penalizeFile = false;
-                logger.
-                        info("Resolving conflict for {} by not transferring file and still considering the process a success.", new Object[]{flowFile});
+                logger.info("Resolving conflict for {} by not transferring file and still considering the process a success.", new Object[]{flowFile});
                 break;
             case FileTransfer.CONFLICT_RESOLUTION_FAIL:
                 destinationRelationship = REL_FAILURE;
                 transferFile = false;
                 penalizeFile = true;
-                logger.
-                        info("Resolved filename conflict for {} as configured by routing to FAILURE relationship.", new Object[]{flowFile});
+                logger.info("Resolved filename conflict for {} as configured by routing to FAILURE relationship.", new Object[]{flowFile});
             default:
                 break;
         }
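
The CONFLICT_RESOLUTION_RENAME branch above probes candidate names by prefixing an incrementing index (1.name, 2.name, ...) and keeps the first one the remote server does not already report, giving up after 99 attempts and falling back to REJECT. A minimal standalone sketch of that probing loop, in plain Java rather than NiFi code (the helper name and the Predicate-based existence check are illustrative assumptions, not part of FileTransfer):

    import java.util.function.Predicate;

    public class UniqueNameSketch {

        // Hypothetical helper: returns the first "i.fileName" candidate the existence
        // check does not know about, or null after 99 attempts (the caller would then
        // fall back to rejecting the FlowFile, as in the switch above).
        static String findUniqueName(final String fileName, final Predicate<String> existsOnRemote) {
            for (int i = 1; i < 100; i++) {
                final String candidate = i + "." + fileName;
                if (!existsOnRemote.test(candidate)) {
                    return candidate;
                }
            }
            return null;
        }

        public static void main(final String[] args) {
            // Pretend "1.data.txt" is already taken on the remote server.
            final Predicate<String> exists = name -> name.equals("1.data.txt");
            System.out.println(findUniqueName("data.txt", exists)); // prints 2.data.txt
        }
    }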


[26/50] [abbrv] incubator-nifi git commit: Merge branch 'develop' of https://git-wip-us.apache.org/repos/asf/incubator-nifi into develop

Posted by mc...@apache.org.
Merge branch 'develop' of https://git-wip-us.apache.org/repos/asf/incubator-nifi into develop


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/43b2f040
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/43b2f040
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/43b2f040

Branch: refs/heads/NIFI-292
Commit: 43b2f040bcb181abc435e4f8f1c5da0591141254
Parents: 21209b2 384b2ac
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 14:26:26 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 14:26:26 2015 -0400

----------------------------------------------------------------------
 .../nifi/provenance/IndexConfiguration.java     |  12 +-
 .../PersistentProvenanceRepository.java         | 612 +++++++-------
 .../provenance/RepositoryConfiguration.java     | 106 +--
 .../nifi/provenance/StandardRecordReader.java   | 246 +++---
 .../nifi/provenance/StandardRecordWriter.java   | 138 ++--
 .../provenance/expiration/ExpirationAction.java |   6 +-
 .../provenance/lucene/DeleteIndexAction.java    |  12 +-
 .../nifi/provenance/lucene/DocsReader.java      |  79 +-
 .../nifi/provenance/lucene/IndexManager.java    | 820 +++++++++----------
 .../nifi/provenance/lucene/IndexSearch.java     |  38 +-
 .../nifi/provenance/lucene/IndexingAction.java  | 119 +--
 .../nifi/provenance/lucene/LineageQuery.java    |   6 +-
 .../nifi/provenance/lucene/LuceneUtil.java      |  38 +-
 .../provenance/rollover/CompressionAction.java  |  59 --
 .../provenance/rollover/RolloverAction.java     |  35 -
 .../provenance/serialization/RecordReader.java  |  57 +-
 .../provenance/serialization/RecordReaders.java | 136 +--
 .../provenance/serialization/RecordWriter.java  |  23 +-
 .../provenance/serialization/RecordWriters.java |   8 +-
 .../nifi/provenance/toc/StandardTocReader.java  |  44 +-
 .../nifi/provenance/toc/StandardTocWriter.java  |  35 +-
 .../apache/nifi/provenance/toc/TocReader.java   |  20 +-
 .../org/apache/nifi/provenance/toc/TocUtil.java |  27 +-
 .../apache/nifi/provenance/toc/TocWriter.java   |  16 +-
 .../TestPersistentProvenanceRepository.java     | 118 +--
 .../TestStandardRecordReaderWriter.java         | 162 ++--
 .../org/apache/nifi/provenance/TestUtil.java    |   2 +-
 .../provenance/toc/TestStandardTocReader.java   |  20 +-
 .../provenance/toc/TestStandardTocWriter.java   |   4 +-
 29 files changed, 1391 insertions(+), 1607 deletions(-)
----------------------------------------------------------------------



[14/50] [abbrv] incubator-nifi git commit: Merge branch 'develop' of http://git-wip-us.apache.org/repos/asf/incubator-nifi into develop

Posted by mc...@apache.org.
Merge branch 'develop' of http://git-wip-us.apache.org/repos/asf/incubator-nifi into develop


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/666de3d4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/666de3d4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/666de3d4

Branch: refs/heads/NIFI-292
Commit: 666de3d410a8f80a8ad4a90e5fbcce08c777103e
Parents: ba96e43 d29a2d6
Author: Mark Payne <ma...@hotmail.com>
Authored: Mon Apr 27 12:04:35 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Mon Apr 27 12:04:35 2015 -0400

----------------------------------------------------------------------
 .../standard/AbstractJsonPathProcessor.java     |  11 +-
 .../standard/ConvertCharacterSet.java           |   2 +-
 .../nifi/processors/standard/HashAttribute.java |  34 +-
 .../nifi/processors/standard/PutEmail.java      | 302 +++++++---------
 .../apache/nifi/processors/standard/PutFTP.java |  24 +-
 .../nifi/processors/standard/PutFile.java       | 275 ++++++--------
 .../processors/standard/PutFileTransfer.java    | 140 +++-----
 .../apache/nifi/processors/standard/PutJMS.java | 161 +++------
 .../nifi/processors/standard/ReplaceText.java   | 169 ++++-----
 .../standard/ReplaceTextWithMapping.java        | 231 +++++-------
 .../processors/standard/RouteOnAttribute.java   | 136 +++----
 .../processors/standard/RouteOnContent.java     | 147 +++-----
 .../nifi/processors/standard/ScanAttribute.java | 121 +++----
 .../nifi/processors/standard/ScanContent.java   |  86 ++---
 .../processors/standard/SegmentContent.java     |  54 ++-
 .../nifi/processors/standard/SplitContent.java  | 125 +++----
 .../nifi/processors/standard/SplitJson.java     |  77 ++--
 .../nifi/processors/standard/SplitText.java     | 142 ++++----
 .../nifi/processors/standard/SplitXml.java      |  70 ++--
 .../nifi/processors/standard/TransformXml.java  |  87 ++---
 .../nifi/processors/standard/UnpackContent.java | 215 +++++------
 .../servlets/ContentAcknowledgmentServlet.java  |  55 +--
 .../standard/servlets/ListenHTTPServlet.java    | 142 +++-----
 .../nifi/processors/standard/util/Bin.java      |  22 +-
 .../processors/standard/util/BinManager.java    |  41 +--
 .../standard/util/DocumentReaderCallback.java   |   6 +-
 .../processors/standard/util/FTPTransfer.java   | 351 +++++++-----------
 .../nifi/processors/standard/util/FTPUtils.java |  95 ++---
 .../nifi/processors/standard/util/FileInfo.java |   3 +-
 .../processors/standard/util/FileTransfer.java  | 356 +++++++++----------
 .../processors/standard/util/JmsFactory.java    | 128 +++----
 .../processors/standard/util/JmsProperties.java | 256 ++++++-------
 .../util/JsonPathExpressionValidator.java       |  27 +-
 .../standard/util/NLKBufferedReader.java        |  14 +-
 .../processors/standard/util/SFTPTransfer.java  | 351 +++++++-----------
 .../processors/standard/util/SFTPUtils.java     | 167 ++++-----
 .../standard/util/UDPStreamConsumer.java        |  25 +-
 .../util/ValidatingBase32InputStream.java       |   3 +-
 .../util/ValidatingBase64InputStream.java       |   3 +-
 .../standard/util/WrappedMessageConsumer.java   |   9 +-
 .../standard/util/WrappedMessageProducer.java   |   9 +-
 .../src/test/java/TestIngestAndUpdate.java      |   3 +-
 .../processors/standard/CaptureServlet.java     |   3 +-
 .../standard/RESTServiceContentModified.java    |  15 +-
 .../standard/TestBase64EncodeContent.java       |  42 +--
 .../standard/TestCompressContent.java           |  85 ++---
 .../processors/standard/TestControlRate.java    |   3 +-
 .../standard/TestConvertCharacterSet.java       |  13 +-
 .../standard/TestDetectDuplicate.java           |  33 +-
 .../processors/standard/TestDistributeLoad.java |  19 +-
 .../processors/standard/TestEncodeContent.java  |  66 ++--
 .../processors/standard/TestEncryptContent.java |  30 +-
 .../standard/TestEvaluateJsonPath.java          | 219 ++++--------
 .../processors/standard/TestEvaluateXPath.java  | 106 ++----
 .../processors/standard/TestEvaluateXQuery.java | 312 +++++-----------
 .../processors/standard/TestExecuteProcess.java |  18 +-
 .../standard/TestExecuteStreamCommand.java      | 135 +++----
 .../processors/standard/TestExtractText.java    |  81 ++---
 .../nifi/processors/standard/TestGetFile.java   |  63 ++--
 .../nifi/processors/standard/TestGetHTTP.java   |  57 +--
 .../processors/standard/TestGetJMSQueue.java    |  63 ++--
 .../standard/TestHandleHttpRequest.java         |  19 +-
 .../standard/TestHandleHttpResponse.java        |  81 ++---
 .../processors/standard/TestHashAttribute.java  |   8 +-
 .../processors/standard/TestHashContent.java    |   5 +-
 .../standard/TestIdentifyMimeType.java          |  16 +-
 .../processors/standard/TestInvokeHTTP.java     | 137 ++-----
 .../processors/standard/TestJmsConsumer.java    |  88 ++---
 .../nifi/processors/standard/TestListenUDP.java |  39 +-
 .../processors/standard/TestMergeContent.java   | 176 +++------
 .../processors/standard/TestModifyBytes.java    |  82 ++---
 .../standard/TestMonitorActivity.java           |  84 ++---
 .../nifi/processors/standard/TestPostHTTP.java  | 102 ++----
 .../nifi/processors/standard/TestPutEmail.java  |  17 +-
 .../processors/standard/TestReplaceText.java    |  81 ++---
 .../standard/TestReplaceTextLineByLine.java     | 204 ++++-------
 .../standard/TestReplaceTextWithMapping.java    | 316 +++++-----------
 .../standard/TestRouteOnAttribute.java          |  65 ++--
 .../processors/standard/TestRouteOnContent.java |  19 +-
 .../processors/standard/TestScanAttribute.java  |  16 +-
 .../processors/standard/TestScanContent.java    |  34 +-
 .../processors/standard/TestSegmentContent.java |  14 +-
 .../nifi/processors/standard/TestServer.java    |  21 +-
 .../processors/standard/TestSplitContent.java   | 196 ++++------
 .../nifi/processors/standard/TestSplitJson.java |  91 ++---
 .../nifi/processors/standard/TestSplitText.java |  80 ++---
 .../nifi/processors/standard/TestSplitXml.java  |   4 +-
 .../processors/standard/TestTransformXml.java   |  45 +--
 .../processors/standard/TestUnpackContent.java  | 131 +++----
 .../processors/standard/TestValidateXml.java    |   4 +-
 .../standard/UserAgentTestingServlet.java       |   1 -
 91 files changed, 2933 insertions(+), 5281 deletions(-)
----------------------------------------------------------------------



[02/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextWithMapping.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextWithMapping.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextWithMapping.java
index 89f330b..7a480a8 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextWithMapping.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestReplaceTextWithMapping.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.ReplaceTextWithMapping;
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
@@ -36,23 +35,15 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testSimple() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
-        final String mappingFile = Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").
-                toFile().
-                getAbsolutePath();
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
+        final String mappingFile = Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").toFile().getAbsolutePath();
         runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, mappingFile);
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "roses are apple\n"
                 + "violets are blueberry\n"
@@ -63,12 +54,8 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testExpressionLanguageInText() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
-        final String mappingFile = Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").
-                toFile().
-                getAbsolutePath();
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
+        final String mappingFile = Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").toFile().getAbsolutePath();
         runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, mappingFile);
 
         String text = "${foo} red ${baz}";
@@ -76,11 +63,8 @@ public class TestReplaceTextWithMapping {
         runner.enqueue(text.getBytes());
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "${foo} apple ${baz}";
         assertEquals(expected, outputString);
@@ -88,27 +72,19 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testExpressionLanguageInText2() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
-        final String mappingFile = Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").
-                toFile().
-                getAbsolutePath();
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
+        final String mappingFile = Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").toFile().getAbsolutePath();
         runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, mappingFile);
         runner.setProperty(ReplaceTextWithMapping.REGEX, "\\|(.*?)\\|");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
 
         String text = "${foo}|red|${baz}";
 
         runner.enqueue(text.getBytes());
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "${foo}|apple|${baz}";
         assertEquals(expected, outputString);
@@ -116,27 +92,19 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testExpressionLanguageInText3() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
-        final String mappingFile = Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").
-                toFile().
-                getAbsolutePath();
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
+        final String mappingFile = Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").toFile().getAbsolutePath();
         runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, mappingFile);
         runner.setProperty(ReplaceTextWithMapping.REGEX, ".*\\|(.*?)\\|.*");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
 
         String text = "${foo}|red|${baz}";
 
         runner.enqueue(text.getBytes());
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "${foo}|apple|${baz}";
         assertEquals(expected, outputString);
@@ -144,25 +112,16 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testWithMatchingGroupAndContext() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.REGEX, "-(.*?)-");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors.txt"));
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").toFile().getAbsolutePath());
+
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "-roses- are -apple-\n"
                 + "violets are -blueberry-\n"
@@ -173,25 +132,16 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testBackReference() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.REGEX, "(\\S+)");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-backreference-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-backreference-mapping.txt").toFile().getAbsolutePath());
+
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "roses are red apple\n"
                 + "violets are blue blueberry\n"
@@ -202,47 +152,32 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testRoutesToFailureIfTooLarge() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.REGEX, "[123]");
         runner.setProperty(ReplaceTextWithMapping.MAX_BUFFER_SIZE, "1 b");
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").
-                toFile().
-                getAbsolutePath());
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").toFile().getAbsolutePath());
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("abc", "Good");
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors.txt"));
 
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_FAILURE, 1);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_FAILURE, 1);
     }
 
     @Test
     public void testBackReferenceWithTooLargeOfIndexIsEscaped() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.REGEX, "-(.*?)-");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-excessive-backreference-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors.txt"));
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-excessive-backreference-mapping.txt").toFile().getAbsolutePath());
+
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "-roses- are -red$2 apple-\n"
                 + "violets are -blue$2 blueberry-\n"
@@ -253,23 +188,15 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testBackReferenceWithTooLargeOfIndexIsEscapedSimple() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE,
-                Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-excessive-backreference-mapping-simple.txt").
-                toFile().
-                getAbsolutePath());
+                Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-excessive-backreference-mapping-simple.txt").toFile().getAbsolutePath());
 
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "roses are red$1 apple\n"
                 + "violets are blue$1 blueberry\n"
@@ -280,25 +207,16 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testBackReferenceWithInvalidReferenceIsEscaped() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.REGEX, "(\\S+)");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-invalid-backreference-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-invalid-backreference-mapping.txt").toFile().getAbsolutePath());
+
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "roses are red$d apple\n"
                 + "violets are blue$d blueberry\n"
@@ -309,25 +227,16 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testEscapingDollarSign() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.REGEX, "-(.*?)-");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-escaped-dollar-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors.txt"));
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-escaped-dollar-mapping.txt").toFile().getAbsolutePath());
+
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "-roses- are -$1 apple-\n"
                 + "violets are -$1 blueberry-\n"
@@ -338,22 +247,14 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testEscapingDollarSignSimple() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-escaped-dollar-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-escaped-dollar-mapping.txt").toFile().getAbsolutePath());
+
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "roses are $1 apple\n"
                 + "violets are $1 blueberry\n"
@@ -364,22 +265,14 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testReplaceWithEmptyString() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-blank-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-blank-mapping.txt").toFile().getAbsolutePath());
+
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "roses are \n"
                 + "violets are \n"
@@ -390,22 +283,14 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testReplaceWithSpaceInString() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-space-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        runner.enqueue(Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-space-mapping.txt").toFile().getAbsolutePath());
+
+        runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = "roses are really red\n"
                 + "violets are super blue\n"
@@ -416,26 +301,17 @@ public class TestReplaceTextWithMapping {
 
     @Test
     public void testWithNoMatch() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.REGEX, "-(.*?)-");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-fruit-no-match-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        final Path path = Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors.txt");
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "1");
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-no-match-mapping.txt").toFile().getAbsolutePath());
+
+        final Path path = Paths.get("src/test/resources/TestReplaceTextWithMapping/colors.txt");
         runner.enqueue(path);
         runner.run();
 
-        runner.
-                assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.
-                getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).
-                get(0);
+        runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
         String outputString = new String(out.toByteArray());
         String expected = new String(Files.readAllBytes(path));
         assertEquals(expected, outputString);
@@ -443,18 +319,12 @@ public class TestReplaceTextWithMapping {
 
     @Test(expected = java.lang.AssertionError.class)
     public void testMatchingGroupForLookupKeyTooLarge() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new ReplaceTextWithMapping());
+        final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());
         runner.setProperty(ReplaceTextWithMapping.REGEX, "-(.*?)-");
-        runner.
-                setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "2");
-        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/color-mapping.txt").
-                toFile().
-                getAbsolutePath());
-
-        final Path path = Paths.
-                get("src/test/resources/TestReplaceTextWithMapping/colors.txt");
+        runner.setProperty(ReplaceTextWithMapping.MATCHING_GROUP_FOR_LOOKUP_KEY, "2");
+        runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE, Paths.get("src/test/resources/TestReplaceTextWithMapping/color-mapping.txt").toFile().getAbsolutePath());
+
+        final Path path = Paths.get("src/test/resources/TestReplaceTextWithMapping/colors.txt");
         runner.enqueue(path);
         runner.run();
     }
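
Every test touched in this hunk follows the same nifi-mock flow: build a TestRunner for the processor, set its properties, enqueue content, run, then assert on relationship transfers and output. A condensed sketch of that flow, assuming the same test resources and the org.apache.nifi.util mock classes used above (the class name is made up; the first expected output line is taken from the testSimple case):

    package org.apache.nifi.processors.standard;

    import static org.junit.Assert.assertEquals;

    import java.io.IOException;
    import java.nio.file.Paths;

    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class ReplaceTextWithMappingFlowSketch {

        @Test
        public void testRunnerFlow() throws IOException {
            // 1. Runner for the processor under test.
            final TestRunner runner = TestRunners.newTestRunner(new ReplaceTextWithMapping());

            // 2. Configure processor properties (here only the mapping file).
            runner.setProperty(ReplaceTextWithMapping.MAPPING_FILE,
                    Paths.get("src/test/resources/TestReplaceTextWithMapping/color-fruit-mapping.txt").toFile().getAbsolutePath());

            // 3. Enqueue FlowFile content and run the processor once.
            runner.enqueue(Paths.get("src/test/resources/TestReplaceTextWithMapping/colors-without-dashes.txt"));
            runner.run();

            // 4. Assert on routing and inspect the output content.
            runner.assertAllFlowFilesTransferred(ReplaceTextWithMapping.REL_SUCCESS, 1);
            final MockFlowFile out = runner.getFlowFilesForRelationship(ReplaceTextWithMapping.REL_SUCCESS).get(0);
            assertEquals("roses are apple", new String(out.toByteArray()).split("\n")[0]);
        }
    }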

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnAttribute.java
index 56996fe..2eac3f2 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnAttribute.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.RouteOnAttribute;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 
@@ -40,8 +39,7 @@ public class TestRouteOnAttribute {
     public void testInvalidOnMisconfiguredProperty() {
         final RouteOnAttribute proc = new RouteOnAttribute();
         final MockProcessContext ctx = new MockProcessContext(proc);
-        final ValidationResult validationResult = ctx.
-                setProperty("RouteA", "${a:equals('b')"); // Missing closing brace
+        final ValidationResult validationResult = ctx.setProperty("RouteA", "${a:equals('b')"); // Missing closing brace
         assertFalse(validationResult.isValid());
     }
 
@@ -49,15 +47,13 @@ public class TestRouteOnAttribute {
     public void testInvalidOnNonBooleanProperty() {
         final RouteOnAttribute proc = new RouteOnAttribute();
         final MockProcessContext ctx = new MockProcessContext(proc);
-        final ValidationResult validationResult = ctx.
-                setProperty("RouteA", "${a:length()"); // Should be boolean
+        final ValidationResult validationResult = ctx.setProperty("RouteA", "${a:length()"); // Should be boolean
         assertFalse(validationResult.isValid());
     }
 
     @Test
     public void testSimpleEquals() {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new RouteOnAttribute());
+        final TestRunner runner = TestRunners.newTestRunner(new RouteOnAttribute());
         runner.setProperty("RouteA", "${a:equals('b')}");
 
         final Map<String, String> attributes = new HashMap<>();
@@ -66,24 +62,16 @@ public class TestRouteOnAttribute {
 
         runner.run();
 
-        runner.assertAllFlowFilesTransferred(new Relationship.Builder().
-                name("RouteA").
-                build(), 1);
-        final List<MockFlowFile> flowFiles = runner.
-                getFlowFilesForRelationship("RouteA");
-        flowFiles.get(0).
-                assertAttributeEquals("a", "b");
-        flowFiles.get(0).
-                assertAttributeEquals(RouteOnAttribute.ROUTE_ATTRIBUTE_KEY, "RouteA");
+        runner.assertAllFlowFilesTransferred(new Relationship.Builder().name("RouteA").build(), 1);
+        final List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship("RouteA");
+        flowFiles.get(0).assertAttributeEquals("a", "b");
+        flowFiles.get(0).assertAttributeEquals(RouteOnAttribute.ROUTE_ATTRIBUTE_KEY, "RouteA");
     }
 
     @Test
     public void testMatchAll() {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new RouteOnAttribute());
-        runner.
-                setProperty(RouteOnAttribute.ROUTE_STRATEGY, RouteOnAttribute.ROUTE_ALL_MATCH.
-                        getValue());
+        final TestRunner runner = TestRunners.newTestRunner(new RouteOnAttribute());
+        runner.setProperty(RouteOnAttribute.ROUTE_STRATEGY, RouteOnAttribute.ROUTE_ALL_MATCH.getValue());
         runner.setProperty("RouteA", "${a:equals('b')}");
         runner.setProperty("RouteB", "${b:equals('a')}");
 
@@ -103,36 +91,27 @@ public class TestRouteOnAttribute {
 
         runner.run(4);
 
-        final List<MockFlowFile> match = runner.
-                getFlowFilesForRelationship(RouteOnAttribute.REL_MATCH);
-        final List<MockFlowFile> noMatch = runner.
-                getFlowFilesForRelationship(RouteOnAttribute.REL_NO_MATCH);
+        final List<MockFlowFile> match = runner.getFlowFilesForRelationship(RouteOnAttribute.REL_MATCH);
+        final List<MockFlowFile> noMatch = runner.getFlowFilesForRelationship(RouteOnAttribute.REL_NO_MATCH);
 
         assertEquals(1, match.size());
         assertEquals(3, noMatch.size());
 
         for (final MockFlowFile ff : noMatch) {
-            ff.
-                    assertAttributeEquals(RouteOnAttribute.ROUTE_ATTRIBUTE_KEY, "unmatched");
+            ff.assertAttributeEquals(RouteOnAttribute.ROUTE_ATTRIBUTE_KEY, "unmatched");
         }
 
-        final Map<String, String> matchedAttrs = match.iterator().
-                next().
-                getAttributes();
+        final Map<String, String> matchedAttrs = match.iterator().next().getAttributes();
         assertEquals("b", matchedAttrs.get("a"));
         assertEquals("a", matchedAttrs.get("b"));
-        assertEquals("matched", matchedAttrs.
-                get(RouteOnAttribute.ROUTE_ATTRIBUTE_KEY));
+        assertEquals("matched", matchedAttrs.get(RouteOnAttribute.ROUTE_ATTRIBUTE_KEY));
     }
 
     @Test
     public void testMatchAny() {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new RouteOnAttribute());
+        final TestRunner runner = TestRunners.newTestRunner(new RouteOnAttribute());
         runner.setThreadCount(4);
-        runner.
-                setProperty(RouteOnAttribute.ROUTE_STRATEGY, RouteOnAttribute.ROUTE_ANY_MATCHES.
-                        getValue());
+        runner.setProperty(RouteOnAttribute.ROUTE_STRATEGY, RouteOnAttribute.ROUTE_ANY_MATCHES.getValue());
         runner.setProperty("RouteA", "${a:equals('b')}");
         runner.setProperty("RouteB", "${b:equals('a')}");
 
@@ -152,20 +131,16 @@ public class TestRouteOnAttribute {
 
         runner.run(4);
 
-        final List<MockFlowFile> match = runner.
-                getFlowFilesForRelationship(RouteOnAttribute.REL_MATCH);
-        final List<MockFlowFile> noMatch = runner.
-                getFlowFilesForRelationship(RouteOnAttribute.REL_NO_MATCH);
+        final List<MockFlowFile> match = runner.getFlowFilesForRelationship(RouteOnAttribute.REL_MATCH);
+        final List<MockFlowFile> noMatch = runner.getFlowFilesForRelationship(RouteOnAttribute.REL_NO_MATCH);
 
         assertEquals(2, match.size());
         assertEquals(2, noMatch.size());
 
         // Get attributes for both matching FlowFiles
         final Iterator<MockFlowFile> itr = match.iterator();
-        final Map<String, String> attrs1 = itr.next().
-                getAttributes();
-        final Map<String, String> attrs2 = itr.next().
-                getAttributes();
+        final Map<String, String> attrs1 = itr.next().getAttributes();
+        final Map<String, String> attrs2 = itr.next().getAttributes();
 
         // Both matches should map a -> b
         assertEquals("b", attrs1.get("a"));

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnContent.java
index 96c281a..fb89d86 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestRouteOnContent.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.RouteOnContent;
 import java.io.IOException;
 import java.nio.file.Paths;
 import java.util.HashMap;
@@ -31,10 +30,8 @@ public class TestRouteOnContent {
 
     @Test
     public void testCloning() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new RouteOnContent());
-        runner.
-                setProperty(RouteOnContent.MATCH_REQUIREMENT, RouteOnContent.MATCH_SUBSEQUENCE);
+        final TestRunner runner = TestRunners.newTestRunner(new RouteOnContent());
+        runner.setProperty(RouteOnContent.MATCH_REQUIREMENT, RouteOnContent.MATCH_SUBSEQUENCE);
         runner.setProperty("hello", "Hello");
         runner.setProperty("world", "World");
 
@@ -48,10 +45,8 @@ public class TestRouteOnContent {
 
     @Test
     public void testSubstituteAttributes() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new RouteOnContent());
-        runner.
-                setProperty(RouteOnContent.MATCH_REQUIREMENT, RouteOnContent.MATCH_SUBSEQUENCE);
+        final TestRunner runner = TestRunners.newTestRunner(new RouteOnContent());
+        runner.setProperty(RouteOnContent.MATCH_REQUIREMENT, RouteOnContent.MATCH_SUBSEQUENCE);
         runner.setProperty("attr", "Hel${highLow}");
 
         final Map<String, String> attributes = new HashMap<>();
@@ -64,10 +59,8 @@ public class TestRouteOnContent {
 
     @Test
     public void testBufferSize() throws IOException {
-        final TestRunner runner = TestRunners.
-                newTestRunner(new RouteOnContent());
-        runner.
-                setProperty(RouteOnContent.MATCH_REQUIREMENT, RouteOnContent.MATCH_ALL);
+        final TestRunner runner = TestRunners.newTestRunner(new RouteOnContent());
+        runner.setProperty(RouteOnContent.MATCH_REQUIREMENT, RouteOnContent.MATCH_ALL);
         runner.setProperty(RouteOnContent.BUFFER_SIZE, "3 B");
         runner.setProperty("rel", "Hel");
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanAttribute.java
index 982cf57..b4a4136 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanAttribute.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.ScanAttribute;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -30,8 +29,7 @@ public class TestScanAttribute {
     @Test
     public void testSingleMatch() {
         final TestRunner runner = TestRunners.newTestRunner(new ScanAttribute());
-        runner.
-                setProperty(ScanAttribute.DICTIONARY_FILE, "src/test/resources/ScanAttribute/dictionary1");
+        runner.setProperty(ScanAttribute.DICTIONARY_FILE, "src/test/resources/ScanAttribute/dictionary1");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("abc", "world");
@@ -67,10 +65,8 @@ public class TestScanAttribute {
     @Test
     public void testAllMatch() {
         final TestRunner runner = TestRunners.newTestRunner(new ScanAttribute());
-        runner.
-                setProperty(ScanAttribute.DICTIONARY_FILE, "src/test/resources/ScanAttribute/dictionary1");
-        runner.
-                setProperty(ScanAttribute.MATCHING_CRITERIA, ScanAttribute.MATCH_CRITERIA_ALL);
+        runner.setProperty(ScanAttribute.DICTIONARY_FILE, "src/test/resources/ScanAttribute/dictionary1");
+        runner.setProperty(ScanAttribute.MATCHING_CRITERIA, ScanAttribute.MATCH_CRITERIA_ALL);
         runner.setProperty(ScanAttribute.ATTRIBUTE_PATTERN, "a.*");
 
         final Map<String, String> attributes = new HashMap<>();
@@ -106,8 +102,7 @@ public class TestScanAttribute {
     @Test
     public void testWithEmptyEntries() {
         final TestRunner runner = TestRunners.newTestRunner(new ScanAttribute());
-        runner.
-                setProperty(ScanAttribute.DICTIONARY_FILE, "src/test/resources/ScanAttribute/dictionary-with-empty-new-lines");
+        runner.setProperty(ScanAttribute.DICTIONARY_FILE, "src/test/resources/ScanAttribute/dictionary-with-empty-new-lines");
 
         final Map<String, String> attributes = new HashMap<>();
         attributes.put("abc", "");
@@ -127,8 +122,7 @@ public class TestScanAttribute {
     @Test
     public void testWithDictionaryFilter() {
         final TestRunner runner = TestRunners.newTestRunner(new ScanAttribute());
-        runner.
-                setProperty(ScanAttribute.DICTIONARY_FILE, "src/test/resources/ScanAttribute/dictionary-with-extra-info");
+        runner.setProperty(ScanAttribute.DICTIONARY_FILE, "src/test/resources/ScanAttribute/dictionary-with-extra-info");
         runner.setProperty(ScanAttribute.DICTIONARY_FILTER, "(.*)<greeting>");
 
         final Map<String, String> attributes = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanContent.java
index 442aa63..8c36845 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestScanContent.java
@@ -50,22 +50,15 @@ public class TestScanContent {
             final byte[] termBytes = baos.toByteArray();
 
             final Path dictionaryPath = Paths.get("target/dictionary");
-            Files.
-                    write(dictionaryPath, termBytes, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
+            Files.write(dictionaryPath, termBytes, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
 
-            final TestRunner runner = TestRunners.
-                    newTestRunner(new ScanContent());
+            final TestRunner runner = TestRunners.newTestRunner(new ScanContent());
             runner.setThreadCount(1);
-            runner.
-                    setProperty(ScanContent.DICTIONARY, dictionaryPath.
-                            toString());
-            runner.
-                    setProperty(ScanContent.DICTIONARY_ENCODING, ScanContent.BINARY_ENCODING);
+            runner.setProperty(ScanContent.DICTIONARY, dictionaryPath.toString());
+            runner.setProperty(ScanContent.DICTIONARY_ENCODING, ScanContent.BINARY_ENCODING);
 
-            runner.enqueue(Paths.
-                    get("src/test/resources/TestScanContent/helloWorld"));
-            runner.enqueue(Paths.
-                    get("src/test/resources/TestScanContent/wellthengood-bye"));
+            runner.enqueue(Paths.get("src/test/resources/TestScanContent/helloWorld"));
+            runner.enqueue(Paths.get("src/test/resources/TestScanContent/wellthengood-bye"));
             runner.enqueue(new byte[0]);
 
             while (!runner.isQueueEmpty()) {
@@ -79,18 +72,13 @@ public class TestScanContent {
 
             runner.assertTransferCount(ScanContent.REL_MATCH, 2);
             runner.assertTransferCount(ScanContent.REL_NO_MATCH, 1);
-            final List<MockFlowFile> matched = runner.
-                    getFlowFilesForRelationship(ScanContent.REL_MATCH);
-            final List<MockFlowFile> unmatched = runner.
-                    getFlowFilesForRelationship(ScanContent.REL_NO_MATCH);
+            final List<MockFlowFile> matched = runner.getFlowFilesForRelationship(ScanContent.REL_MATCH);
+            final List<MockFlowFile> unmatched = runner.getFlowFilesForRelationship(ScanContent.REL_NO_MATCH);
 
-            matched.get(0).
-                    assertAttributeEquals(ScanContent.MATCH_ATTRIBUTE_KEY, "hello");
-            matched.get(1).
-                    assertAttributeEquals(ScanContent.MATCH_ATTRIBUTE_KEY, "good-bye");
+            matched.get(0).assertAttributeEquals(ScanContent.MATCH_ATTRIBUTE_KEY, "hello");
+            matched.get(1).assertAttributeEquals(ScanContent.MATCH_ATTRIBUTE_KEY, "good-bye");
 
-            unmatched.get(0).
-                    assertAttributeNotExists(ScanContent.MATCH_ATTRIBUTE_KEY);
+            unmatched.get(0).assertAttributeNotExists(ScanContent.MATCH_ATTRIBUTE_KEY);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSegmentContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSegmentContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSegmentContent.java
index 7a6001c..5a88323 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSegmentContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSegmentContent.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.SegmentContent;
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
@@ -32,15 +31,13 @@ public class TestSegmentContent {
 
     @Test
     public void test() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new SegmentContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new SegmentContent());
         testRunner.setProperty(SegmentContent.SIZE, "4 B");
 
         testRunner.enqueue(new byte[]{1, 2, 3, 4, 5, 6, 7, 8, 9});
         testRunner.run();
 
-        final List<MockFlowFile> flowFiles = testRunner.
-                getFlowFilesForRelationship(SegmentContent.REL_SEGMENTS);
+        final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(SegmentContent.REL_SEGMENTS);
         assertEquals(3, flowFiles.size());
 
         final MockFlowFile out1 = flowFiles.get(0);
@@ -54,17 +51,14 @@ public class TestSegmentContent {
 
     @Test
     public void testTransferSmall() throws IOException {
-        final TestRunner testRunner = TestRunners.
-                newTestRunner(new SegmentContent());
+        final TestRunner testRunner = TestRunners.newTestRunner(new SegmentContent());
         testRunner.setProperty(SegmentContent.SIZE, "4 KB");
 
         testRunner.enqueue(new byte[]{1, 2, 3, 4, 5, 6, 7, 8, 9});
         testRunner.run();
 
         testRunner.assertTransferCount(SegmentContent.REL_SEGMENTS, 1);
-        final MockFlowFile out1 = testRunner.
-                getFlowFilesForRelationship(SegmentContent.REL_SEGMENTS).
-                get(0);
+        final MockFlowFile out1 = testRunner.getFlowFilesForRelationship(SegmentContent.REL_SEGMENTS).get(0);
         out1.assertContentEquals(new byte[]{1, 2, 3, 4, 5, 6, 7, 8, 9});
     }
 }
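
For reference, the segment counts above follow directly from the configured size: a 9-byte payload at 4 B per segment yields ceil(9 / 4) = 3 segments, while at 4 KB the whole payload fits in a single segment. A minimal sketch of the expected segment contents, where the second and third segments are inferred from sequential segmentation rather than asserted in this commit:

        out1.assertContentEquals(new byte[]{1, 2, 3, 4});              // first full 4 B segment
        flowFiles.get(1).assertContentEquals(new byte[]{5, 6, 7, 8});  // second full segment (inferred)
        flowFiles.get(2).assertContentEquals(new byte[]{9});           // trailing 1-byte remainder (inferred)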

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestServer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestServer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestServer.java
index b9c623e..7e5dd7b 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestServer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestServer.java
@@ -44,8 +44,7 @@ public class TestServer {
     /**
      * Creates the test server.
      *
-     * @param sslProperties SSLProps to be used in the secure connection. The
-     * keys should should use the StandardSSLContextService properties.
+     * @param sslProperties SSLProps to be used in the secure connection. The keys should use the StandardSSLContextService properties.
      */
     public TestServer(final Map<String, String> sslProperties) {
         createServer(sslProperties);
@@ -78,21 +77,15 @@ public class TestServer {
         SslContextFactory ssl = new SslContextFactory();
 
         if (sslProperties.get(StandardSSLContextService.KEYSTORE.getName()) != null) {
-            ssl.setKeyStorePath(sslProperties.
-                    get(StandardSSLContextService.KEYSTORE.getName()));
-            ssl.setKeyStorePassword(sslProperties.
-                    get(StandardSSLContextService.KEYSTORE_PASSWORD.getName()));
-            ssl.setKeyStoreType(sslProperties.
-                    get(StandardSSLContextService.KEYSTORE_TYPE.getName()));
+            ssl.setKeyStorePath(sslProperties.get(StandardSSLContextService.KEYSTORE.getName()));
+            ssl.setKeyStorePassword(sslProperties.get(StandardSSLContextService.KEYSTORE_PASSWORD.getName()));
+            ssl.setKeyStoreType(sslProperties.get(StandardSSLContextService.KEYSTORE_TYPE.getName()));
         }
 
         if (sslProperties.get(StandardSSLContextService.TRUSTSTORE.getName()) != null) {
-            ssl.setTrustStorePath(sslProperties.
-                    get(StandardSSLContextService.TRUSTSTORE.getName()));
-            ssl.setTrustStorePassword(sslProperties.
-                    get(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName()));
-            ssl.setTrustStoreType(sslProperties.
-                    get(StandardSSLContextService.TRUSTSTORE_TYPE.getName()));
+            ssl.setTrustStorePath(sslProperties.get(StandardSSLContextService.TRUSTSTORE.getName()));
+            ssl.setTrustStorePassword(sslProperties.get(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName()));
+            ssl.setTrustStoreType(sslProperties.get(StandardSSLContextService.TRUSTSTORE_TYPE.getName()));
         }
 
         final String clientAuth = sslProperties.get(NEED_CLIENT_AUTH);
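
A minimal sketch of how a caller might build the sslProperties map consumed above, keyed by the StandardSSLContextService property names referenced in this hunk; the store paths, passwords, and types are placeholders, not values from this commit:

        final Map<String, String> sslProperties = new HashMap<>();
        sslProperties.put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/keystore.jks");       // placeholder path
        sslProperties.put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "keystorePassword");             // placeholder
        sslProperties.put(StandardSSLContextService.KEYSTORE_TYPE.getName(), "JKS");
        sslProperties.put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/truststore.jks");   // placeholder path
        sslProperties.put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "truststorePassword");         // placeholder
        sslProperties.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
        final TestServer server = new TestServer(sslProperties);      // constructor documented at the top of this file's diff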

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitContent.java
index ea3da22..6d9fba9 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitContent.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processors.standard.MergeContent;
-import org.apache.nifi.processors.standard.SplitContent;
 import java.io.IOException;
 import java.util.List;
 
@@ -32,13 +30,10 @@ public class TestSplitContent {
     @Test
     public void testTextFormatLeadingPosition() {
         final TestRunner runner = TestRunners.newTestRunner(new SplitContent());
-        runner.setProperty(SplitContent.FORMAT, SplitContent.UTF8_FORMAT.
-                getValue());
+        runner.setProperty(SplitContent.FORMAT, SplitContent.UTF8_FORMAT.getValue());
         runner.setProperty(SplitContent.BYTE_SEQUENCE, "ub");
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "true");
-        runner.
-                setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.LEADING_POSITION.
-                        getValue());
+        runner.setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.LEADING_POSITION.getValue());
 
         runner.enqueue("rub-a-dub-dub".getBytes());
         runner.run();
@@ -47,31 +42,22 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 4);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals("r");
-        splits.get(1).
-                assertContentEquals("ub-a-d");
-        splits.get(2).
-                assertContentEquals("ub-d");
-        splits.get(3).
-                assertContentEquals("ub");
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        splits.get(0).assertContentEquals("r");
+        splits.get(1).assertContentEquals("ub-a-d");
+        splits.get(2).assertContentEquals("ub-d");
+        splits.get(3).assertContentEquals("ub");
     }
 
     @Test
     public void testTextFormatSplits() {
         final TestRunner runner = TestRunners.newTestRunner(new SplitContent());
-        runner.setProperty(SplitContent.FORMAT, SplitContent.UTF8_FORMAT.
-                getValue());
+        runner.setProperty(SplitContent.FORMAT, SplitContent.UTF8_FORMAT.getValue());
         runner.setProperty(SplitContent.BYTE_SEQUENCE, "test");
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "true");
-        runner.
-                setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.LEADING_POSITION.
-                        getValue());
+        runner.setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.LEADING_POSITION.getValue());
 
-        final byte[] input = "This is a test. This is another test. And this is yet another test. Finally this is the last Test.".
-                getBytes();
+        final byte[] input = "This is a test. This is another test. And this is yet another test. Finally this is the last Test.".getBytes();
         runner.enqueue(input);
         runner.run();
 
@@ -79,16 +65,11 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 4);
 
         runner.assertQueueEmpty();
-        List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals("This is a ");
-        splits.get(1).
-                assertContentEquals("test. This is another ");
-        splits.get(2).
-                assertContentEquals("test. And this is yet another ");
-        splits.get(3).
-                assertContentEquals("test. Finally this is the last Test.");
+        List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        splits.get(0).assertContentEquals("This is a ");
+        splits.get(1).assertContentEquals("test. This is another ");
+        splits.get(2).assertContentEquals("test. And this is yet another ");
+        splits.get(3).assertContentEquals("test. Finally this is the last Test.");
         runner.clearTransferState();
 
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "false");
@@ -97,77 +78,50 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitContent.REL_SPLITS, 4);
         splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals("This is a ");
-        splits.get(1).
-                assertContentEquals(". This is another ");
-        splits.get(2).
-                assertContentEquals(". And this is yet another ");
-        splits.get(3).
-                assertContentEquals(". Finally this is the last Test.");
+        splits.get(0).assertContentEquals("This is a ");
+        splits.get(1).assertContentEquals(". This is another ");
+        splits.get(2).assertContentEquals(". And this is yet another ");
+        splits.get(3).assertContentEquals(". Finally this is the last Test.");
         runner.clearTransferState();
 
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "true");
-        runner.
-                setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.TRAILING_POSITION.
-                        getValue());
+        runner.setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.TRAILING_POSITION.getValue());
         runner.enqueue(input);
         runner.run();
         runner.assertTransferCount(SplitContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitContent.REL_SPLITS, 4);
         splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals("This is a test");
-        splits.get(1).
-                assertContentEquals(". This is another test");
-        splits.get(2).
-                assertContentEquals(". And this is yet another test");
-        splits.get(3).
-                assertContentEquals(". Finally this is the last Test.");
+        splits.get(0).assertContentEquals("This is a test");
+        splits.get(1).assertContentEquals(". This is another test");
+        splits.get(2).assertContentEquals(". And this is yet another test");
+        splits.get(3).assertContentEquals(". Finally this is the last Test.");
         runner.clearTransferState();
 
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "true");
-        runner.
-                setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.TRAILING_POSITION.
-                        getValue());
-        runner.
-                enqueue("This is a test. This is another test. And this is yet another test. Finally this is the last test".
-                        getBytes());
+        runner.setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.TRAILING_POSITION.getValue());
+        runner.enqueue("This is a test. This is another test. And this is yet another test. Finally this is the last test".getBytes());
         runner.run();
         runner.assertTransferCount(SplitContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitContent.REL_SPLITS, 4);
         splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals("This is a test");
-        splits.get(1).
-                assertContentEquals(". This is another test");
-        splits.get(2).
-                assertContentEquals(". And this is yet another test");
-        splits.get(3).
-                assertContentEquals(". Finally this is the last test");
+        splits.get(0).assertContentEquals("This is a test");
+        splits.get(1).assertContentEquals(". This is another test");
+        splits.get(2).assertContentEquals(". And this is yet another test");
+        splits.get(3).assertContentEquals(". Finally this is the last test");
         runner.clearTransferState();
 
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "true");
-        runner.
-                setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.LEADING_POSITION.
-                        getValue());
-        runner.
-                enqueue("This is a test. This is another test. And this is yet another test. Finally this is the last test".
-                        getBytes());
+        runner.setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.LEADING_POSITION.getValue());
+        runner.enqueue("This is a test. This is another test. And this is yet another test. Finally this is the last test".getBytes());
         runner.run();
         runner.assertTransferCount(SplitContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitContent.REL_SPLITS, 5);
         splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals("This is a ");
-        splits.get(1).
-                assertContentEquals("test. This is another ");
-        splits.get(2).
-                assertContentEquals("test. And this is yet another ");
-        splits.get(3).
-                assertContentEquals("test. Finally this is the last ");
-        splits.get(4).
-                assertContentEquals("test");
+        splits.get(0).assertContentEquals("This is a ");
+        splits.get(1).assertContentEquals("test. This is another ");
+        splits.get(2).assertContentEquals("test. And this is yet another ");
+        splits.get(3).assertContentEquals("test. Finally this is the last ");
+        splits.get(4).assertContentEquals("test");
 
         runner.clearTransferState();
     }
@@ -175,13 +129,10 @@ public class TestSplitContent {
     @Test
     public void testTextFormatTrailingPosition() {
         final TestRunner runner = TestRunners.newTestRunner(new SplitContent());
-        runner.setProperty(SplitContent.FORMAT, SplitContent.UTF8_FORMAT.
-                getValue());
+        runner.setProperty(SplitContent.FORMAT, SplitContent.UTF8_FORMAT.getValue());
         runner.setProperty(SplitContent.BYTE_SEQUENCE, "ub");
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "true");
-        runner.
-                setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.TRAILING_POSITION.
-                        getValue());
+        runner.setProperty(SplitContent.BYTE_SEQUENCE_LOCATION, SplitContent.TRAILING_POSITION.getValue());
 
         runner.enqueue("rub-a-dub-dub".getBytes());
         runner.run();
@@ -190,14 +141,10 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 3);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals("rub");
-        splits.get(1).
-                assertContentEquals("-a-dub");
-        splits.get(2).
-                assertContentEquals("-dub");
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        splits.get(0).assertContentEquals("rub");
+        splits.get(1).assertContentEquals("-a-dub");
+        splits.get(2).assertContentEquals("-dub");
     }
 
     @Test
@@ -206,16 +153,14 @@ public class TestSplitContent {
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "false");
         runner.setProperty(SplitContent.BYTE_SEQUENCE.getName(), "FFFF");
 
-        runner.
-                enqueue(new byte[]{1, 2, 3, 4, 5, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 5, 4, 3, 2, 1});
+        runner.enqueue(new byte[]{1, 2, 3, 4, 5, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 5, 4, 3, 2, 1});
         runner.run();
 
         runner.assertTransferCount(SplitContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitContent.REL_SPLITS, 2);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
         final MockFlowFile split1 = splits.get(0);
         final MockFlowFile split2 = splits.get(1);
 
@@ -236,8 +181,7 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 2);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
         final MockFlowFile split1 = splits.get(0);
         final MockFlowFile split2 = splits.get(1);
 
@@ -261,8 +205,7 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 2);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
         final MockFlowFile split1 = splits.get(0);
         final MockFlowFile split2 = splits.get(1);
 
@@ -287,8 +230,7 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 2);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
         final MockFlowFile split1 = splits.get(0);
         final MockFlowFile split2 = splits.get(1);
 
@@ -310,8 +252,7 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 1);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
         final MockFlowFile split1 = splits.get(0);
 
         split1.assertContentEquals(new byte[]{1, 2, 3, 4});
@@ -331,8 +272,7 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 1);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
         final MockFlowFile split1 = splits.get(0);
 
         split1.assertContentEquals(new byte[]{1, 2, 3, 4, 5, 5, 5, 5});
@@ -352,8 +292,7 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 1);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
         final MockFlowFile split1 = splits.get(0);
 
         split1.assertContentEquals(new byte[]{1, 2, 3, 4});
@@ -373,12 +312,9 @@ public class TestSplitContent {
         runner.assertTransferCount(SplitContent.REL_SPLITS, 2);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
-        splits.get(0).
-                assertContentEquals(new byte[]{5, 5, 5, 5});
-        splits.get(1).
-                assertContentEquals(new byte[]{1, 2, 3, 4});
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        splits.get(0).assertContentEquals(new byte[]{5, 5, 5, 5});
+        splits.get(1).assertContentEquals(new byte[]{1, 2, 3, 4});
     }
 
     @Test
@@ -387,29 +323,23 @@ public class TestSplitContent {
         runner.setProperty(SplitContent.KEEP_SEQUENCE, "true");
         runner.setProperty(SplitContent.BYTE_SEQUENCE.getName(), "FFFF");
 
-        runner.
-                enqueue(new byte[]{1, 2, 3, 4, 5, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 5, 4, 3, 2, 1});
+        runner.enqueue(new byte[]{1, 2, 3, 4, 5, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 5, 4, 3, 2, 1});
         runner.run();
 
         runner.assertTransferCount(SplitContent.REL_ORIGINAL, 1);
         runner.assertTransferCount(SplitContent.REL_SPLITS, 2);
 
         runner.assertQueueEmpty();
-        final List<MockFlowFile> splits = runner.
-                getFlowFilesForRelationship(SplitContent.REL_SPLITS);
+        final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
         final MockFlowFile split1 = splits.get(0);
         final MockFlowFile split2 = splits.get(1);
 
-        split1.
-                assertContentEquals(new byte[]{1, 2, 3, 4, 5, (byte) 0xFF, (byte) 0xFF});
+        split1.assertContentEquals(new byte[]{1, 2, 3, 4, 5, (byte) 0xFF, (byte) 0xFF});
         split2.assertContentEquals(new byte[]{(byte) 0xFF, 5, 4, 3, 2, 1});
 
-        final TestRunner mergeRunner = TestRunners.
-                newTestRunner(new MergeContent());
-        mergeRunner.
-                setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
-        mergeRunner.
-                setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
+        final TestRunner mergeRunner = TestRunners.newTestRunner(new MergeContent());
+        mergeRunner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
+        mergeRunner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         mergeRunner.enqueue(splits.toArray(new MockFlowFile[0]));
         mergeRunner.run();
 
@@ -417,9 +347,7 @@ public class TestSplitContent {
         mergeRunner.assertTransferCount(MergeContent.REL_ORIGINAL, 2);
         mergeRunner.assertTransferCount(MergeContent.REL_FAILURE, 0);
 
-        final List<MockFlowFile> packed = mergeRunner.
-                getFlowFilesForRelationship(MergeContent.REL_MERGED);
-        packed.get(0).
-                assertContentEquals(new byte[]{1, 2, 3, 4, 5, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 5, 4, 3, 2, 1});
+        final List<MockFlowFile> packed = mergeRunner.getFlowFilesForRelationship(MergeContent.REL_MERGED);
+        packed.get(0).assertContentEquals(new byte[]{1, 2, 3, 4, 5, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 5, 4, 3, 2, 1});
     }
 }
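
A short hedged note on why the defragment merge above can rebuild the original payload: SplitContent stamps each split with the standard NiFi fragment attributes, which MergeContent's defragment strategy uses to order the splits and detect completeness. The attribute names below are the conventional ones and are an assumption here, since this commit does not show them:

        splits.get(0).assertAttributeExists("fragment.identifier");   // shared id tying the splits to one parent
        splits.get(0).assertAttributeExists("fragment.index");        // position used to re-order during the merge
        splits.get(0).assertAttributeExists("fragment.count");        // total number of splits MergeContent waits for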

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitJson.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitJson.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitJson.java
index 9503182..fc07386 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitJson.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestSplitJson.java
@@ -35,18 +35,15 @@ import java.nio.file.Paths;
 
 public class TestSplitJson {
 
-    private static final Path JSON_SNIPPET = Paths.
-            get("src/test/resources/TestJson/json-sample.json");
-    private static final Path XML_SNIPPET = Paths.
-            get("src/test/resources/TestXml/xml-snippet.xml");
+    private static final Path JSON_SNIPPET = Paths.get("src/test/resources/TestJson/json-sample.json");
+    private static final Path XML_SNIPPET = Paths.get("src/test/resources/TestXml/xml-snippet.xml");
 
     @Test(expected = AssertionError.class)
     public void testInvalidJsonPath() {
         final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
         testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$..");
 
-        Assert.
-                fail("An improper JsonPath expression was not detected as being invalid.");
+        Assert.fail("An improper JsonPath expression was not detected as being invalid.");
     }
 
     @Test
@@ -58,9 +55,7 @@ public class TestSplitJson {
         testRunner.run();
 
         testRunner.assertAllFlowFilesTransferred(SplitJson.REL_FAILURE, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(SplitJson.REL_FAILURE).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(SplitJson.REL_FAILURE).get(0);
         // Verify that the content was unchanged
         out.assertContentEquals(XML_SNIPPET);
     }
@@ -76,36 +71,28 @@ public class TestSplitJson {
         Relationship expectedRel = SplitJson.REL_FAILURE;
 
         testRunner.assertAllFlowFilesTransferred(expectedRel, 1);
-        final MockFlowFile out = testRunner.
-                getFlowFilesForRelationship(expectedRel).
-                get(0);
+        final MockFlowFile out = testRunner.getFlowFilesForRelationship(expectedRel).get(0);
         out.assertContentEquals(JSON_SNIPPET);
     }
 
     @Test
     public void testSplit_arrayResult_oneValue() throws Exception {
         final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
-        testRunner.
-                setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$[0].range[?(@ == 0)]");
+        testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$[0].range[?(@ == 0)]");
 
         testRunner.enqueue(JSON_SNIPPET);
         testRunner.run();
 
         testRunner.assertTransferCount(SplitJson.REL_ORIGINAL, 1);
         testRunner.assertTransferCount(SplitJson.REL_SPLIT, 1);
-        testRunner.getFlowFilesForRelationship(SplitJson.REL_ORIGINAL).
-                get(0).
-                assertContentEquals(JSON_SNIPPET);
-        testRunner.getFlowFilesForRelationship(SplitJson.REL_SPLIT).
-                get(0).
-                assertContentEquals("0");
+        testRunner.getFlowFilesForRelationship(SplitJson.REL_ORIGINAL).get(0).assertContentEquals(JSON_SNIPPET);
+        testRunner.getFlowFilesForRelationship(SplitJson.REL_SPLIT).get(0).assertContentEquals("0");
     }
 
     @Test
     public void testSplit_arrayResult_multipleValues() throws Exception {
         final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
-        testRunner.
-                setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$[0].range");
+        testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$[0].range");
 
         testRunner.enqueue(JSON_SNIPPET);
         testRunner.run();
@@ -114,63 +101,49 @@ public class TestSplitJson {
 
         testRunner.assertTransferCount(SplitJson.REL_ORIGINAL, 1);
         testRunner.assertTransferCount(SplitJson.REL_SPLIT, numSplitsExpected);
-        final MockFlowFile originalOut = testRunner.
-                getFlowFilesForRelationship(SplitJson.REL_ORIGINAL).
-                get(0);
+        final MockFlowFile originalOut = testRunner.getFlowFilesForRelationship(SplitJson.REL_ORIGINAL).get(0);
         originalOut.assertContentEquals(JSON_SNIPPET);
     }
 
     @Test
     public void testSplit_arrayResult_nonScalarValues() throws Exception {
         final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
-        testRunner.
-                setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$[*].name");
+        testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$[*].name");
 
         testRunner.enqueue(JSON_SNIPPET);
         testRunner.run();
 
         testRunner.assertTransferCount(SplitJson.REL_ORIGINAL, 1);
         testRunner.assertTransferCount(SplitJson.REL_SPLIT, 7);
-        testRunner.getFlowFilesForRelationship(SplitJson.REL_ORIGINAL).
-                get(0).
-                assertContentEquals(JSON_SNIPPET);
-        testRunner.getFlowFilesForRelationship(SplitJson.REL_SPLIT).
-                get(0).
-                assertContentEquals("{\"first\":\"Shaffer\",\"last\":\"Pearson\"}");
+        testRunner.getFlowFilesForRelationship(SplitJson.REL_ORIGINAL).get(0).assertContentEquals(JSON_SNIPPET);
+        testRunner.getFlowFilesForRelationship(SplitJson.REL_SPLIT).get(0).assertContentEquals("{\"first\":\"Shaffer\",\"last\":\"Pearson\"}");
     }
 
     @Test
     public void testSplit_pathNotFound() throws Exception {
         final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
-        testRunner.
-                setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.nonexistent");
+        testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.nonexistent");
 
         testRunner.enqueue(JSON_SNIPPET);
         testRunner.run();
 
         testRunner.assertTransferCount(SplitJson.REL_FAILURE, 1);
-        testRunner.getFlowFilesForRelationship(SplitJson.REL_FAILURE).
-                get(0).
-                assertContentEquals(JSON_SNIPPET);
+        testRunner.getFlowFilesForRelationship(SplitJson.REL_FAILURE).get(0).assertContentEquals(JSON_SNIPPET);
     }
 
     @Test
     public void testSplit_pathToNullValue() throws Exception {
         final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
-        testRunner.
-                setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.nullField");
+        testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.nullField");
 
-        ProcessSession session = testRunner.getProcessSessionFactory().
-                createSession();
+        ProcessSession session = testRunner.getProcessSessionFactory().createSession();
         FlowFile ff = session.create();
 
         ff = session.write(ff, new OutputStreamCallback() {
             @Override
             public void process(OutputStream out) throws IOException {
                 try (OutputStream outputStream = new BufferedOutputStream(out)) {
-                    outputStream.
-                            write("{\"stringField\": \"String Value\", \"nullField\": null}".
-                                    getBytes(StandardCharsets.UTF_8));
+                    outputStream.write("{\"stringField\": \"String Value\", \"nullField\": null}".getBytes(StandardCharsets.UTF_8));
                 }
             }
         });
@@ -184,20 +157,16 @@ public class TestSplitJson {
     @Test
     public void testSplit_pathToArrayWithNulls_emptyStringRepresentation() throws Exception {
         final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
-        testRunner.
-                setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.arrayOfNulls");
+        testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.arrayOfNulls");
 
-        ProcessSession session = testRunner.getProcessSessionFactory().
-                createSession();
+        ProcessSession session = testRunner.getProcessSessionFactory().createSession();
         FlowFile ff = session.create();
 
         ff = session.write(ff, new OutputStreamCallback() {
             @Override
             public void process(OutputStream out) throws IOException {
                 try (OutputStream outputStream = new BufferedOutputStream(out)) {
-                    outputStream.
-                            write("{\"stringField\": \"String Value\", \"arrayOfNulls\": [null, null, null]}".
-                                    getBytes(StandardCharsets.UTF_8));
+                    outputStream.write("{\"stringField\": \"String Value\", \"arrayOfNulls\": [null, null, null]}".getBytes(StandardCharsets.UTF_8));
                 }
             }
         });
@@ -209,31 +178,25 @@ public class TestSplitJson {
         int expectedFiles = 3;
         testRunner.assertTransferCount(SplitJson.REL_SPLIT, expectedFiles);
         for (int i = 0; i < expectedFiles; i++) {
-            testRunner.getFlowFilesForRelationship(SplitJson.REL_SPLIT).
-                    get(i).
-                    assertContentEquals("");
+            testRunner.getFlowFilesForRelationship(SplitJson.REL_SPLIT).get(i).assertContentEquals("");
         }
     }
 
     @Test
     public void testSplit_pathToArrayWithNulls_nullStringRepresentation() throws Exception {
         final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
-        testRunner.
-                setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.arrayOfNulls");
+        testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.arrayOfNulls");
         testRunner.setProperty(SplitJson.NULL_VALUE_DEFAULT_REPRESENTATION,
                 AbstractJsonPathProcessor.NULL_STRING_OPTION);
 
-        ProcessSession session = testRunner.getProcessSessionFactory().
-                createSession();
+        ProcessSession session = testRunner.getProcessSessionFactory().createSession();
         FlowFile ff = session.create();
 
         ff = session.write(ff, new OutputStreamCallback() {
             @Override
             public void process(OutputStream out) throws IOException {
                 try (OutputStream outputStream = new BufferedOutputStream(out)) {
-                    outputStream.
-                            write("{\"stringField\": \"String Value\", \"arrayOfNulls\": [null, null, null]}".
-                                    getBytes(StandardCharsets.UTF_8));
+                    outputStream.write("{\"stringField\": \"String Value\", \"arrayOfNulls\": [null, null, null]}".getBytes(StandardCharsets.UTF_8));
                 }
             }
         });
@@ -245,9 +208,7 @@ public class TestSplitJson {
         int expectedFiles = 3;
         testRunner.assertTransferCount(SplitJson.REL_SPLIT, expectedFiles);
         for (int i = 0; i < expectedFiles; i++) {
-            testRunner.getFlowFilesForRelationship(SplitJson.REL_SPLIT).
-                    get(i).
-                    assertContentEquals("null");
+            testRunner.getFlowFilesForRelationship(SplitJson.REL_SPLIT).get(i).assertContentEquals("null");
         }
     }
 }


[38/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
index 5d233f7..137cc07 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ConnectionResource.java
@@ -85,8 +85,8 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Populate the uri's for the specified processors and their relationships.
      *
-     * @param connections
-     * @return
+     * @param connections connections
+     * @return dtos
      */
     public Set<ConnectionDTO> populateRemainingConnectionsContent(Set<ConnectionDTO> connections) {
         for (ConnectionDTO connection : connections) {
@@ -97,9 +97,6 @@ public class ConnectionResource extends ApplicationResource {
 
     /**
      * Populate the uri's for the specified processor and its relationships.
-     *
-     * @param connection
-     * @return
      */
     private ConnectionDTO populateRemainingConnectionContent(ConnectionDTO connection) {
         // populate the remaining properties
@@ -110,9 +107,7 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Gets all the connections.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A connectionsEntity.
      */
     @GET
@@ -145,9 +140,7 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Retrieves the specified connection.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the connection.
      * @return A connectionEntity.
      */
@@ -183,9 +176,7 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Retrieves the specified connection status history.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the connection to retrieve.
      * @return A statusHistoryEntity.
      */
@@ -220,12 +211,9 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Creates a connection.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param name The name of the connection.
      * @param sourceId The id of the source connectable.
      * @param sourceGroupId The parent group id for the source.
@@ -233,19 +221,14 @@ public class ConnectionResource extends ApplicationResource {
      * @param bends Array of bend points in string form ["x,y", "x,y", "x,y"]
      * @param relationships Array of relationships.
      * @param flowFileExpiration The flow file expiration in minutes
-     * @param backPressureObjectThreshold The object count for when to apply
-     * back pressure.
-     * @param backPressureDataSizeThreshold The object size for when to apply
-     * back pressure.
-     * @param prioritizers Array of prioritizer types. These types should refer
-     * to one of the types in the GET /controller/prioritizers response. If this
-     * parameter is not specified no change will be made. If this parameter
-     * appears with no value (empty string), it will be treated as an empty
-     * array.
+     * @param backPressureObjectThreshold The object count for when to apply back pressure.
+     * @param backPressureDataSizeThreshold The object size for when to apply back pressure.
+     * @param prioritizers Array of prioritizer types. These types should refer to one of the types in the GET /controller/prioritizers response. If this parameter is not specified no change will be
+     * made. If this parameter appears with no value (empty string), it will be treated as an empty array.
      * @param destinationId The id of the destination connectable.
      * @param destinationGroupId The parent group id for the destination.
      * @param destinationType The type of the destination connectable.
-     * @param formParams
+     * @param formParams params
      * @return A connectionEntity.
      */
     @POST
@@ -378,7 +361,7 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Creates a new connection.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param connectionEntity A connectionEntity.
      * @return A connectionEntity.
      */
@@ -466,12 +449,9 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Updates the specified relationship target.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param connectionId The id of the source processor.
      * @param name The name of the connection.
      * @param relationships Array of relationships.
@@ -479,19 +459,14 @@ public class ConnectionResource extends ApplicationResource {
      * @param labelIndex The control point index for the connection label
      * @param zIndex The zIndex for this connection
      * @param flowFileExpiration The flow file expiration in minutes
-     * @param backPressureObjectThreshold The object count for when to apply
-     * back pressure.
-     * @param backPressureDataSizeThreshold The object size for when to apply
-     * back pressure.
-     * @param prioritizers Array of prioritizer types. These types should refer
-     * to one of the types in the GET /controller/prioritizers response. If this
-     * parameter is not specified no change will be made. If this parameter
-     * appears with no value (empty string), it will be treated as an empty
-     * array.
+     * @param backPressureObjectThreshold The object count for when to apply back pressure.
+     * @param backPressureDataSizeThreshold The object size for when to apply back pressure.
+     * @param prioritizers Array of prioritizer types. These types should refer to one of the types in the GET /controller/prioritizers response. If this parameter is not specified no change will be
+     * made. If this parameter appears with no value (empty string), it will be treated as an empty array.
      * @param destinationId The id of the destination connectable.
      * @param destinationGroupId The group id of the destination.
      * @param destinationType The type of the destination type.
-     * @param formParams
+     * @param formParams params
      * @return A connectionEntity.
      */
     @PUT
@@ -627,7 +602,7 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Updates the specified connection.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the connection.
      * @param connectionEntity A connectionEntity.
      * @return A connectionEntity.
@@ -698,12 +673,9 @@ public class ConnectionResource extends ApplicationResource {
     /**
      * Removes the specified connection.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the connection.
      * @return An Entity containing the client id and an updated revision.
      */

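A note on the reflowed prioritizers description above: the endpoint distinguishes an absent parameter (no change) from one supplied as an empty string (clear the list). A minimal, hypothetical sketch of that three-way handling, independent of any NiFi or JAX-RS types (the helper name and signature are illustrative only):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    final class PrioritizerParamSketch {
        // Absent parameter: keep the current prioritizers. A single empty string: treat as
        // an empty array. Otherwise: replace with the supplied types.
        static List<String> resolve(final List<String> formValues, final List<String> current) {
            if (formValues == null) {
                return current;
            }
            if (formValues.size() == 1 && formValues.get(0).isEmpty()) {
                return Collections.emptyList();
            }
            return new ArrayList<>(formValues);
        }
    }
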
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
index 9228be4..9e34201 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerResource.java
@@ -93,7 +93,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the Provenance sub-resource.
      *
-     * @return
+     * @return the Provenance sub-resource
      */
     @Path("/provenance")
     public ProvenanceResource getProvenanceResource() {
@@ -103,7 +103,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the User sub-resource.
      *
-     * @return
+     * @return the User sub-resource
      */
     @Path("/users")
     public UserResource getUserResource() {
@@ -113,7 +113,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the User sub-resource.
      *
-     * @return
+     * @return the User sub-resource
      */
     @Path("/user-groups")
     public UserGroupResource getUserGroupResource() {
@@ -123,7 +123,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the History sub-resource.
      *
-     * @return
+     * @return the History sub-resource
      */
     @Path("/history")
     public HistoryResource getHistoryResource() {
@@ -133,7 +133,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the History sub-resource.
      *
-     * @return
+     * @return the History sub-resource
      */
     @Path("/bulletin-board")
     public BulletinBoardResource getBulletinBoardResource() {
@@ -143,7 +143,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the Template sub-resource.
      *
-     * @return
+     * @return the Template sub-resource
      */
     @Path("/templates")
     public TemplateResource getTemplateResource() {
@@ -153,7 +153,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the Snippets sub-resource.
      *
-     * @return
+     * @return the Snippets sub-resource
      */
     @Path("/snippets")
     public SnippetResource getSnippetResource() {
@@ -163,7 +163,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the Controller Services sub-resource.
      *
-     * @return
+     * @return the Controller Services sub-resource
      */
     @Path("/controller-services")
     public ControllerServiceResource getControllerServiceResource() {
@@ -173,7 +173,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Locates the Reporting Tasks sub-resource.
      *
-     * @return
+     * @return the Reporting Tasks sub-resource
      */
     @Path("/reporting-tasks")
     public ReportingTaskResource getReportingTaskResource() {
@@ -184,7 +184,7 @@ public class ControllerResource extends ApplicationResource {
      * Locates the Group sub-resource.
      *
      * @param groupId The process group id
-     * @return
+     * @return the Group sub-resource
      */
     @Path("/process-groups/{process-group-id}")
     public ProcessGroupResource getGroupResource(@PathParam("process-group-id") String groupId) {
@@ -194,8 +194,7 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Returns a 200 OK response to indicate this is a valid controller
-     * endpoint.
+     * Returns a 200 OK response to indicate this is a valid controller endpoint.
      *
      * @return An OK response with an empty entity body.
      */
@@ -212,9 +211,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Returns the details of this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A controllerEntity.
      */
     @GET
@@ -272,22 +269,14 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Creates a new archive of this flow controller. Note, this is a POST
-     * operation that returns a URI that is not representative of the thing that
-     * was actually created. The archive that is created cannot be referenced at
-     * a later time, therefore there is no corresponding URI. Instead the
-     * request URI is returned.
+     * Creates a new archive of this flow controller. Note, this is a POST operation that returns a URI that is not representative of the thing that was actually created. The archive that is created
+     * cannot be referenced at a later time, therefore there is no corresponding URI. Instead the request URI is returned.
      *
-     * Alternatively, we could have performed a PUT request. However, PUT
-     * requests are supposed to be idempotent and this endpoint is certainly
-     * not.
+     * Alternatively, we could have performed a PUT request. However, PUT requests are supposed to be idempotent and this endpoint is certainly not.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A processGroupEntity.
      */
     @POST
@@ -361,9 +350,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the status for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A controllerStatusEntity.
      */
     @GET
@@ -391,9 +378,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the counters report for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A countersEntity.
      */
     @GET
@@ -421,10 +406,8 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Update the specified counter. This will reset the counter value to 0.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the counter.
      * @return A counterEntity.
      */
@@ -468,9 +451,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the configuration for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A controllerConfigurationEntity.
      */
     @GET
@@ -503,18 +484,13 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Update the configuration for this NiFi.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param name The name of this controller.
      * @param comments The comments of this controller.
-     * @param maxTimerDrivenThreadCount The maximum number of timer driven
-     * threads this controller has available.
-     * @param maxEventDrivenThreadCount The maximum number of timer driven
-     * threads this controller has available.
+     * @param maxTimerDrivenThreadCount The maximum number of timer driven threads this controller has available.
+     * @param maxEventDrivenThreadCount The maximum number of event driven threads this controller has available.
      * @return A controllerConfigurationEntity.
      */
     @PUT
@@ -565,7 +541,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Update the configuration for this NiFi.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param configEntity A controllerConfigurationEntity.
      * @return A controllerConfigurationEntity.
      */
@@ -624,12 +600,9 @@ public class ControllerResource extends ApplicationResource {
     }
 
     /**
-     * Retrieves the user details, including the authorities, about the user
-     * making the request.
+     * Retrieves the user details, including the authorities, about the user making the request.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A authoritiesEntity.
      */
     @GET
@@ -661,9 +634,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the banners for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A bannerEntity.
      */
     @GET
@@ -701,9 +672,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of processors that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A processorTypesEntity.
      */
     @GET
@@ -734,9 +703,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of controller services that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param serviceType Returns only services that implement this type
      * @return A controllerServicesTypesEntity.
      */
@@ -770,9 +737,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of reporting tasks that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A controllerServicesTypesEntity.
      */
     @GET
@@ -803,9 +768,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves the types of prioritizers that this NiFi supports.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A prioritizerTypesEntity.
      */
     @GET
@@ -836,9 +799,7 @@ public class ControllerResource extends ApplicationResource {
     /**
      * Retrieves details about this NiFi to put in the About dialog.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return An aboutEntity.
      */
     @GET

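The getXxxResource() methods whose @return tags are filled in above are JAX-RS sub-resource locators: a method annotated with @Path but with no HTTP method annotation returns an object whose own annotations handle the remainder of the request path. A stripped-down, hypothetical example of the pattern (class names and paths here are illustrative, not NiFi's):

    import javax.ws.rs.GET;
    import javax.ws.rs.Path;

    @Path("/controller")
    public class RootResource {

        // Sub-resource locator: no @GET/@POST here, only @Path.
        @Path("/history")
        public HistorySubResource getHistoryResource() {
            return new HistorySubResource();
        }

        public static class HistorySubResource {
            // Handles GET /controller/history once the locator has been matched.
            @GET
            public String get() {
                return "history";
            }
        }
    }
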
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
index bd3daf2..90d031d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ControllerServiceResource.java
@@ -88,8 +88,8 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Populates the uri for the specified controller service.
      *
-     * @param controllerServices
-     * @return
+     * @param controllerServices services
+     * @return dtos
      */
     private Set<ControllerServiceDTO> populateRemainingControllerServicesContent(final String availability, final Set<ControllerServiceDTO> controllerServices) {
         for (ControllerServiceDTO controllerService : controllerServices) {
@@ -121,11 +121,10 @@ public class ControllerServiceResource extends ApplicationResource {
     }
 
     /**
-     * Parses the availability and ensure that the specified availability makes
-     * sense for the given NiFi instance.
+     * Parses the availability and ensures that the specified availability makes sense for the given NiFi instance.
      *
-     * @param availability
-     * @return
+     * @param availability avail
+     * @return avail
      */
     private Availability parseAvailability(final String availability) {
         final Availability avail;
@@ -146,12 +145,9 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Retrieves all of the controller services in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @return A controllerServicesEntity.
      */
     @GET
@@ -186,15 +182,11 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Creates a new controller service.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @param type The type of controller service to create.
      * @return A controllerServiceEntity.
      */
@@ -233,10 +225,9 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Creates a new Controller Service.
      *
-     * @param httpServletRequest
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param httpServletRequest request
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @param controllerServiceEntity A controllerServiceEntity.
      * @return A controllerServiceEntity.
      */
@@ -324,12 +315,9 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Retrieves the specified controller service.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @param id The id of the controller service to retrieve
      * @return A controllerServiceEntity.
      */
@@ -366,10 +354,8 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Returns the descriptor for the specified property.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability avail
      * @param id The id of the controller service.
      * @param propertyName The property
      * @return a propertyDescriptorEntity
@@ -415,12 +401,9 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Retrieves the references of the specified controller service.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @param id The id of the controller service to retrieve
      * @return A controllerServiceEntity.
      */
@@ -458,20 +441,14 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Updates the references of the specified controller service.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @param id The id of the controller service to retrieve
-     * @param state Sets the state of referencing components. A value of RUNNING
-     * or STOPPED will update referencing schedulable components (Processors and
-     * Reporting Tasks). A value of ENABLED or DISABLED will update referencing
-     * controller services.
+     * @param state Sets the state of referencing components. A value of RUNNING or STOPPED will update referencing schedulable components (Processors and Reporting Tasks). A value of ENABLED or
+     * DISABLED will update referencing controller services.
      * @return A controllerServiceEntity.
      */
     @PUT
@@ -558,27 +535,19 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Updates the specified controller service.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @param id The id of the controller service to update.
      * @param name The name of the controller service
      * @param annotationData The annotation data for the controller service
      * @param comments The comments for the controller service
-     * @param state The state of this controller service. Should be ENABLED or
-     * DISABLED.
-     * @param markedForDeletion Array of property names whose value should be
-     * removed.
-     * @param formParams Additionally, the processor properties and styles are
-     * specified in the form parameters. Because the property names and styles
-     * differ from processor to processor they are specified in a map-like
-     * fashion:
+     * @param state The state of this controller service. Should be ENABLED or DISABLED.
+     * @param markedForDeletion Array of property names whose value should be removed.
+     * @param formParams Additionally, the processor properties and styles are specified in the form parameters. Because the property names and styles differ from processor to processor they are
+     * specified in a map-like fashion:
      * <br>
      * <ul>
      * <li>properties[required.file.path]=/path/to/file</li>
@@ -662,10 +631,9 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Updates the specified Controller Service.
      *
-     * @param httpServletRequest
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param httpServletRequest request
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @param id The id of the controller service to update.
      * @param controllerServiceEntity A controllerServiceEntity.
      * @return A controllerServiceEntity.
@@ -740,15 +708,11 @@ public class ControllerServiceResource extends ApplicationResource {
     /**
      * Removes the specified controller service.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the controller service is available on the
-     * NCM only (ncm) or on the nodes only (node). If this instance is not
-     * clustered all services should use the node availability.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the controller service is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all services should use the node
+     * availability.
      * @param id The id of the controller service to remove.
      * @return A entity containing the client id and an updated revision.
      */

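Several of the Javadoc blocks above describe the availability parameter as accepting only 'ncm' or 'node'. The parseAvailability method touched in the first hunk presumably maps that string onto an internal value and rejects anything else; a hypothetical, standalone sketch of that idea (the enum and exception type are placeholders, not the NiFi implementation):

    final class AvailabilitySketch {
        enum Availability { NCM, NODE }

        // Map the request value onto the enum, rejecting anything other than 'ncm' or 'node'.
        static Availability parse(final String value) {
            if ("ncm".equalsIgnoreCase(value)) {
                return Availability.NCM;
            }
            if ("node".equalsIgnoreCase(value)) {
                return Availability.NODE;
            }
            throw new IllegalArgumentException("Availability must be 'ncm' or 'node' but was: " + value);
        }
    }
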
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
index 4e6095e..fd97dca 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/FunnelResource.java
@@ -73,8 +73,8 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Populates the uri for the specified funnels.
      *
-     * @param funnels
-     * @return
+     * @param funnels funnels
+     * @return funnels
      */
     public Set<FunnelDTO> populateRemainingFunnelsContent(Set<FunnelDTO> funnels) {
         for (FunnelDTO funnel : funnels) {
@@ -95,9 +95,7 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Retrieves all of the funnels in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A funnelsEntity.
      */
     @GET
@@ -130,12 +128,9 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Creates a new funnel.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
      * @return A funnelEntity.
@@ -179,7 +174,7 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Creates a new Funnel.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param funnelEntity A funnelEntity.
      * @return A funnelEntity.
      */
@@ -259,9 +254,7 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Retrieves the specified funnel.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the funnel to retrieve
      * @return A funnelEntity.
      */
@@ -295,12 +288,9 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Updates the specified funnel.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the funnel to update.
      * @param parentGroupId The id of the process group to move this funnel to.
      * @param x The x coordinate for this funnels position.
@@ -350,7 +340,7 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Creates a new Funnel.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the funnel to update.
      * @param funnelEntity A funnelEntity.
      * @return A funnelEntity.
@@ -422,12 +412,9 @@ public class FunnelResource extends ApplicationResource {
     /**
      * Removes the specified funnel.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the funnel to remove.
      * @return A entity containing the client id and an updated revision.
      */

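populateRemainingFunnelsContent, whose @param/@return tags are filled in above, follows a pattern repeated across these resource classes: iterate over a set of DTOs, populate each one's URI, and return the same set. A hypothetical, framework-free sketch of the pattern (the DTO type and URI construction are placeholders, not NiFi classes):

    import java.util.Set;

    final class UriPopulationSketch {
        static final class ItemDto {
            String id;
            String uri;
        }

        // Derive each DTO's uri from a base URI plus its id, then return the set for chaining.
        static Set<ItemDto> populateUris(final String baseUri, final Set<ItemDto> items) {
            for (final ItemDto item : items) {
                item.uri = baseUri + "/" + item.id;
            }
            return items;
        }
    }
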
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java
index 49bede82..749863c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/HistoryResource.java
@@ -51,29 +51,17 @@ public class HistoryResource extends ApplicationResource {
     /**
      * Queries the history of this Controller.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param offset The offset into the data. This parameter is required and is
-     * used in conjunction with count.
-     * @param count The number of rows that should be returned. This parameter
-     * is required and is used in conjunction with page.
-     * @param sortColumn The column to sort on. This parameter is optional. If
-     * not specified the results will be returned with the most recent first.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param offset The offset into the data. This parameter is required and is used in conjunction with count.
+     * @param count The number of rows that should be returned. This parameter is required and is used in conjunction with page.
+     * @param sortColumn The column to sort on. This parameter is optional. If not specified the results will be returned with the most recent first.
      * @param sortOrder The sort order.
-     * @param startDate The start date/time for the query. The start date/time
-     * must be formatted as 'MM/dd/yyyy HH:mm:ss'. This parameter is optional
-     * and must be specified in the timezone of the server. The server's
-     * timezone can be determined by inspecting the result of a status or
-     * history request.
-     * @param endDate The end date/time for the query. The end date/time must be
-     * formatted as 'MM/dd/yyyy HH:mm:ss'. This parameter is optional and must
-     * be specified in the timezone of the server. The server's timezone can be
-     * determined by inspecting the result of a status or history request.
-     * @param userName The user name of the user who's actions are being
-     * queried. This parameter is optional.
-     * @param sourceId The id of the source being queried (usually a processor
-     * id). This parameter is optional.
+     * @param startDate The start date/time for the query. The start date/time must be formatted as 'MM/dd/yyyy HH:mm:ss'. This parameter is optional and must be specified in the timezone of the
+     * server. The server's timezone can be determined by inspecting the result of a status or history request.
+     * @param endDate The end date/time for the query. The end date/time must be formatted as 'MM/dd/yyyy HH:mm:ss'. This parameter is optional and must be specified in the timezone of the server. The
+     * server's timezone can be determined by inspecting the result of a status or history request.
+     * @param userName The user name of the user whose actions are being queried. This parameter is optional.
+     * @param sourceId The id of the source being queried (usually a processor id). This parameter is optional.
      * @return A historyEntity.
      */
     @GET
@@ -160,9 +148,7 @@ public class HistoryResource extends ApplicationResource {
     /**
      * Gets the action for the corresponding id.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the action to get.
      * @return An actionEntity.
      */
@@ -198,9 +184,7 @@ public class HistoryResource extends ApplicationResource {
     /**
      * Deletes flow history from the specified end date.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param endDate The end date for the purge action.
      * @return A historyEntity
      */
@@ -235,9 +219,7 @@ public class HistoryResource extends ApplicationResource {
     /**
      * Gets the actions for the specified processor.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param processorId The id of the processor.
      * @return An processorHistoryEntity.
      */
@@ -266,9 +248,7 @@ public class HistoryResource extends ApplicationResource {
     /**
      * Gets the actions for the specified controller service.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param controllerServiceId The id of the controller service.
      * @return An componentHistoryEntity.
      */
@@ -297,9 +277,7 @@ public class HistoryResource extends ApplicationResource {
     /**
      * Gets the actions for the specified reporting task.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param reportingTaskId The id of the reporting task.
      * @return An componentHistoryEntity.
      */

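The reflowed startDate/endDate descriptions above pin the query format to 'MM/dd/yyyy HH:mm:ss', interpreted in the server's timezone. A minimal sketch of parsing such a value with the JDK (strictly illustrative; this is not the NiFi query-handling code):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;

    final class HistoryDateSketch {
        // Parse the documented format using the JVM's default (i.e. the server's) timezone.
        static Date parse(final String value) throws ParseException {
            final SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
            format.setLenient(false);
            return format.parse(value);
        }
    }
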
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
index 9b5eeba..4e446fb 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/InputPortResource.java
@@ -75,8 +75,8 @@ public class InputPortResource extends ApplicationResource {
     /**
      * Populates the uri for the specified input ports.
      *
-     * @param inputPorts
-     * @return
+     * @param inputPorts ports
+     * @return ports
      */
     public Set<PortDTO> populateRemainingInputPortsContent(Set<PortDTO> inputPorts) {
         for (PortDTO inputPort : inputPorts) {
@@ -97,9 +97,7 @@ public class InputPortResource extends ApplicationResource {
     /**
      * Retrieves all of the input ports in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A inputPortsEntity.
      */
     @GET
@@ -132,12 +130,9 @@ public class InputPortResource extends ApplicationResource {
     /**
      * Creates a new input port.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
      * @param name The input ports name.
@@ -185,7 +180,7 @@ public class InputPortResource extends ApplicationResource {
     /**
      * Creates a new input port.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param portEntity A inputPortEntity.
      * @return A inputPortEntity.
      */
@@ -266,9 +261,7 @@ public class InputPortResource extends ApplicationResource {
     /**
      * Retrieves the specified input port.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the input port to retrieve
      * @return A inputPortEntity.
      */
@@ -302,12 +295,9 @@ public class InputPortResource extends ApplicationResource {
     /**
      * Updates the specified input port.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the input port to update.
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
@@ -316,9 +306,8 @@ public class InputPortResource extends ApplicationResource {
      * @param comments Any comments about this input port.
      * @param name The input ports name.
      * @param state The state of this port.
-     * @param concurrentlySchedulableTaskCount The number of concurrently
-     * schedulable tasks.
-     * @param formParams
+     * @param concurrentlySchedulableTaskCount The number of concurrently schedulable tasks.
+     * @param formParams form params
      * @return A inputPortEntity.
      */
     @PUT
@@ -387,7 +376,7 @@ public class InputPortResource extends ApplicationResource {
     /**
      * Updates the specified input port.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the input port to update.
      * @param portEntity A inputPortEntity.
      * @return A inputPortEntity.
@@ -460,12 +449,9 @@ public class InputPortResource extends ApplicationResource {
     /**
      * Removes the specified input port.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the input port to remove.
      * @return A inputPortEntity.
      */

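Nearly every mutating endpoint above documents the same pair of parameters: a version used to verify the client holds the latest revision of the flow, and an optional clientId that is generated when absent and echoed back in the response. A hypothetical sketch of that optimistic-locking idea (field names and the exception type are illustrative only, not NiFi's revision handling):

    import java.util.UUID;

    final class RevisionSketch {
        private long currentVersion;

        // Reject stale updates; generate a client id when the caller did not supply one.
        synchronized String verifyAndIncrement(final Long submittedVersion, final String clientId) {
            if (submittedVersion == null || submittedVersion != currentVersion) {
                throw new IllegalStateException("Submitted revision " + submittedVersion
                        + " does not match the current revision " + currentVersion);
            }
            currentVersion++;
            return (clientId == null || clientId.isEmpty()) ? UUID.randomUUID().toString() : clientId;
        }
    }
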
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java
index 4905ad3..6b12d0e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/LabelResource.java
@@ -75,8 +75,8 @@ public class LabelResource extends ApplicationResource {
     /**
      * Populates the uri for the specified labels.
      *
-     * @param labels
-     * @return
+     * @param labels labels
+     * @return dtos
      */
     public Set<LabelDTO> populateRemainingLabelsContent(Set<LabelDTO> labels) {
         for (LabelDTO label : labels) {
@@ -97,9 +97,7 @@ public class LabelResource extends ApplicationResource {
     /**
      * Retrieves all of the labels in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A labelsEntity.
      */
     @GET
@@ -132,12 +130,9 @@ public class LabelResource extends ApplicationResource {
     /**
      * Creates a new label.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
      * @param width The width of the label.
@@ -195,7 +190,7 @@ public class LabelResource extends ApplicationResource {
     /**
      * Creates a new Label.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param labelEntity A labelEntity.
      * @return A labelEntity.
      */
@@ -275,9 +270,7 @@ public class LabelResource extends ApplicationResource {
     /**
      * Retrieves the specified label.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the label to retrieve
      * @return A labelEntity.
      */
@@ -311,20 +304,16 @@ public class LabelResource extends ApplicationResource {
     /**
      * Updates the specified label.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the label to update.
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
      * @param width The width of the label.
      * @param height The height of the label.
      * @param label The label's value.
-     * @param formParams Additionally, the label styles are specified in the
-     * form parameters. They are specified in a map-like fashion:
+     * @param formParams Additionally, the label styles are specified in the form parameters. They are specified in a map-like fashion:
      * <br>
      * <ul>
      * <li>style[background-color]=#aaaaaa</li>
@@ -405,7 +394,7 @@ public class LabelResource extends ApplicationResource {
     /**
      * Updates the specified label.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the label to update.
      * @param labelEntity A labelEntity.
      * @return A labelEntity.
@@ -477,12 +466,9 @@ public class LabelResource extends ApplicationResource {
     /**
      * Removes the specified label.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the label to remove.
      * @return A entity containing the client id and an updated revision.
      */
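
The label update endpoint earlier in this diff documents its style settings as map-like form parameters (for example, style[background-color]=#aaaaaa). As a rough sketch only, a client using java.net.http could submit such a request as below; the URL path and the version, clientId, and label values are illustrative placeholders, not taken from this commit.

    import java.net.URI;
    import java.net.URLEncoder;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.nio.charset.StandardCharsets;

    public class UpdateLabelExample {
        public static void main(String[] args) throws Exception {
            // Revision fields plus one map-like style entry of the form shown
            // in the Javadoc: style[background-color]=#aaaaaa
            String form = "version=3"
                    + "&clientId=my-client"
                    + "&label=" + URLEncoder.encode("Cluster notes", StandardCharsets.UTF_8)
                    + "&style[background-color]=" + URLEncoder.encode("#aaaaaa", StandardCharsets.UTF_8);

            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8080/nifi-api/controller/labels/1234")) // placeholder path
                    .header("Content-Type", "application/x-www-form-urlencoded")
                    .PUT(HttpRequest.BodyPublishers.ofString(form))
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode() + " " + response.body());
        }
    }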

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java
index db3d040..bb0eba9 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/NodeResource.java
@@ -54,9 +54,7 @@ public class NodeResource extends ApplicationResource {
     /**
      * Gets the contents of the specified node in this NiFi cluster.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The node id.
      * @return A nodeEntity.
      */
@@ -92,9 +90,7 @@ public class NodeResource extends ApplicationResource {
     /**
      * Gets the status for the specified node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the node.
      * @return A processGroupStatusEntity
      */
@@ -128,9 +124,7 @@ public class NodeResource extends ApplicationResource {
     /**
      * Gets the system diagnostics for the specified node.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the node.
      * @return A systemDiagnosticsEntity
      */
@@ -164,9 +158,7 @@ public class NodeResource extends ApplicationResource {
     /**
      * Updates the contents of the specified node in this NiFi cluster.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the node.
      * @param status The status of the node.
      * @param primary Whether the node should be made primary.
@@ -256,9 +248,7 @@ public class NodeResource extends ApplicationResource {
     /**
      * Removes the specified node from this NiFi cluster.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the node
      * @return A nodeEntity
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java
index 168ec90..a600d35 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/OutputPortResource.java
@@ -75,8 +75,8 @@ public class OutputPortResource extends ApplicationResource {
     /**
      * Populates the uri for the specified output ports.
      *
-     * @param outputPorts
-     * @return
+     * @param outputPorts ports
+     * @return dtos
      */
     public Set<PortDTO> populateRemainingOutputPortsContent(Set<PortDTO> outputPorts) {
         for (PortDTO outputPort : outputPorts) {
@@ -97,9 +97,7 @@ public class OutputPortResource extends ApplicationResource {
     /**
      * Retrieves all of the output ports in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return An outputPortsEntity.
      */
     @GET
@@ -132,12 +130,9 @@ public class OutputPortResource extends ApplicationResource {
     /**
      * Creates a new output port.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param x The x coordinate for this output port's position.
      * @param y The y coordinate for this output port's position.
      * @param name The output port's name.
@@ -185,7 +180,7 @@ public class OutputPortResource extends ApplicationResource {
     /**
      * Creates a new output port.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param portEntity An outputPortEntity.
      * @return An outputPortEntity.
      */
@@ -266,9 +261,7 @@ public class OutputPortResource extends ApplicationResource {
     /**
      * Retrieves the specified output port.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the output port to retrieve
      * @return An outputPortEntity.
      */
@@ -302,12 +295,9 @@ public class OutputPortResource extends ApplicationResource {
     /**
      * Updates the specified output port.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the output port to update.
      * @param x The x coordinate for this output port's position.
      * @param y The y coordinate for this output port's position.
@@ -316,9 +306,8 @@ public class OutputPortResource extends ApplicationResource {
      * @param groupAccessControl The allowed groups for this output port.
      * @param userAccessControl The allowed users for this output port.
      * @param state The state of this port.
-     * @param concurrentlySchedulableTaskCount The number of concurrently
-     * schedulable tasks.
-     * @param formParams
+     * @param concurrentlySchedulableTaskCount The number of concurrently schedulable tasks.
+     * @param formParams params
      * @return An outputPortEntity.
      */
     @PUT
@@ -388,7 +377,7 @@ public class OutputPortResource extends ApplicationResource {
     /**
      * Updates the specified output port.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the output port to update.
      * @param portEntity An outputPortEntity.
      * @return An outputPortEntity.
@@ -461,12 +450,9 @@ public class OutputPortResource extends ApplicationResource {
     /**
      * Removes the specified output port.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the output port to remove.
      * @return An outputPortEntity.
      */
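
The remove operation directly above pairs the optional clientId with a version value that lets the server confirm the client saw the latest revision of the flow before the port is deleted. A hedged sketch of such a delete, again assuming the two values are sent as query parameters and using a placeholder path not taken from this diff.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class RemoveOutputPortExample {
        public static void main(String[] args) throws Exception {
            // version is the revision the client last observed; a stale value
            // should cause the server to reject the change.
            String query = "?version=7&clientId=my-client";

            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8080/nifi-api/controller/output-ports/abcd-1234" + query)) // placeholder path
                    .DELETE()
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode() + " " + response.body());
        }
    }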


[16/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/6a706458
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/6a706458
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/6a706458

Branch: refs/heads/NIFI-292
Commit: 6a706458d093af061e6b957d99de3ca1bc1f4c30
Parents: 666de3d
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 13:25:44 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 13:25:44 2015 -0400

----------------------------------------------------------------------
 nifi-parent/pom.xml                             |   6 +-
 .../nifi/processors/standard/BinFiles.java      |  15 +-
 .../processors/standard/CompressContent.java    |   9 +-
 .../nifi/processors/standard/ControlRate.java   |  11 +-
 .../standard/ConvertCharacterSet.java           |  22 ++-
 .../processors/standard/DistributeLoad.java     |  17 +-
 .../processors/standard/EvaluateJsonPath.java   |  36 ++--
 .../nifi/processors/standard/EvaluateXPath.java |   9 +-
 .../processors/standard/EvaluateXQuery.java     |   6 +-
 .../processors/standard/ExecuteProcess.java     |   9 +-
 .../standard/ExecuteStreamCommand.java          |  21 +--
 .../nifi/processors/standard/ExtractText.java   |   3 +-
 .../processors/standard/GenerateFlowFile.java   |   3 +-
 .../nifi/processors/standard/GetFile.java       |   6 +-
 .../nifi/processors/standard/GetJMSTopic.java   |   3 +-
 .../processors/standard/HandleHttpRequest.java  |  17 +-
 .../processors/standard/HandleHttpResponse.java |   3 +-
 .../nifi/processors/standard/HashAttribute.java |  10 +-
 .../nifi/processors/standard/InvokeHTTP.java    |  24 ++-
 .../nifi/processors/standard/JmsConsumer.java   |   6 +-
 .../nifi/processors/standard/ListenUDP.java     | 178 +++++++++----------
 .../nifi/processors/standard/MergeContent.java  |   6 +-
 .../nifi/processors/standard/PostHTTP.java      |  47 +++--
 .../nifi/processors/standard/PutEmail.java      |   6 +-
 .../apache/nifi/processors/standard/PutFTP.java |   9 +-
 .../processors/standard/PutFileTransfer.java    |  11 +-
 .../apache/nifi/processors/standard/PutJMS.java |   3 +-
 .../nifi/processors/standard/PutSFTP.java       |   6 +-
 .../nifi/processors/standard/ReplaceText.java   |  28 ++-
 .../standard/ReplaceTextWithMapping.java        |  13 +-
 .../processors/standard/RouteOnAttribute.java   |   6 +-
 .../nifi/processors/standard/ScanAttribute.java |   5 +-
 .../nifi/processors/standard/SplitContent.java  |   9 +-
 .../nifi/processors/standard/SplitText.java     |  18 +-
 .../nifi/processors/standard/SplitXml.java      |   3 +-
 .../nifi/processors/standard/TransformXml.java  |  52 +++---
 .../nifi/processors/standard/UnpackContent.java |  39 ++--
 .../nifi/processors/standard/ValidateXml.java   |  51 +++---
 .../servlets/ContentAcknowledgmentServlet.java  |   5 -
 .../standard/servlets/ListenHTTPServlet.java    |   5 -
 .../nifi/processors/standard/util/Bin.java      |  22 +--
 .../processors/standard/util/BinManager.java    |   2 +-
 .../standard/util/DocumentReaderCallback.java   |  10 +-
 .../processors/standard/util/FTPTransfer.java   |   3 +-
 .../nifi/processors/standard/util/FTPUtils.java |   2 +-
 .../processors/standard/util/FileTransfer.java  |  29 ++-
 .../processors/standard/util/SFTPTransfer.java  |   9 +-
 .../standard/util/XmlSplitterSaxParser.java     |  11 +-
 .../processors/standard/TestDistributeLoad.java |   3 +-
 .../standard/TestHandleHttpRequest.java         |   4 +-
 50 files changed, 374 insertions(+), 457 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi-parent/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-parent/pom.xml b/nifi-parent/pom.xml
index 2e888c5..bdb94c8 100644
--- a/nifi-parent/pom.xml
+++ b/nifi-parent/pom.xml
@@ -253,7 +253,7 @@
                         <module name="Checker">
                             <property name="charset" value="UTF-8" />
                             <property name="severity" value="warning" />
-                            <!-- Checks for whitespace                               -->
+                            <!-- Checks for whitespace                              -->
                             <!-- See http://checkstyle.sf.net/config_whitespace.html -->
                             <module name="FileTabCharacter">
                                 <property name="eachLine" value="true" />
@@ -267,10 +267,6 @@
                                     <property name="format" value="[@]see\s+[{][@]link" />
                                     <property name="message" value="Javadoc @see does not need @link: pick one or the other." />
                                 </module>
-                                <module name="RegexpSinglelineJava">
-                                    <property name="format" value="jline[.]internal[.]Preconditions" />
-                                    <property name="message" value="Please use Guava Preconditions not JLine" />
-                                </module>
                                 <module name="OuterTypeFilename" />
                                 <module name="LineLength">
                                     <!-- needs extra, because Eclipse formatter ignores the ending left brace -->

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/BinFiles.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/BinFiles.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/BinFiles.java
index 6452c23..399a12b 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/BinFiles.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/BinFiles.java
@@ -225,8 +225,7 @@ public abstract class BinFiles extends AbstractSessionFactoryProcessor {
         try {
             binAlreadyCommitted = this.processBin(bin, binCopy, context, session);
         } catch (final ProcessException e) {
-            logger.
-                    error("Failed to process bundle of {} files due to {}", new Object[]{binCopy.size(), e});
+            logger.error("Failed to process bundle of {} files due to {}", new Object[]{binCopy.size(), e});
 
             for (final FlowFileSessionWrapper wrapper : binCopy) {
                 wrapper.getSession().transfer(wrapper.getFlowFile(), REL_FAILURE);
@@ -294,8 +293,7 @@ public abstract class BinFiles extends AbstractSessionFactoryProcessor {
         }
 
         if (context.getProperty(MAX_SIZE).isSet()) {
-            binManager.setMaximumSize(context.getProperty(MAX_SIZE).
-                    asDataSize(DataUnit.B).longValue());
+            binManager.setMaximumSize(context.getProperty(MAX_SIZE).asDataSize(DataUnit.B).longValue());
         } else {
             binManager.setMaximumSize(Long.MAX_VALUE);
         }
@@ -313,8 +311,7 @@ public abstract class BinFiles extends AbstractSessionFactoryProcessor {
 
     @Override
     protected final Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> problems = new ArrayList<>(super.
-                customValidate(context));
+        final List<ValidationResult> problems = new ArrayList<>(super.customValidate(context));
 
         final long minBytes = context.getProperty(MIN_SIZE).asDataSize(DataUnit.B).longValue();
         final Double maxBytes = context.getProperty(MAX_SIZE).asDataSize(DataUnit.B);
@@ -330,10 +327,8 @@ public abstract class BinFiles extends AbstractSessionFactoryProcessor {
             );
         }
 
-        final Long min = context.getProperty(MIN_ENTRIES).
-                asLong();
-        final Long max = context.getProperty(MAX_ENTRIES).
-                asLong();
+        final Long min = context.getProperty(MIN_ENTRIES).asLong();
+        final Long max = context.getProperty(MAX_ENTRIES).asLong();
 
         if (min != null && max != null) {
             if (min > max) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
index d9946da..585902b 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
@@ -146,8 +146,7 @@ public class CompressContent extends AbstractProcessor {
         mimeTypeMap.put("application/gzip", COMPRESSION_FORMAT_GZIP);
         mimeTypeMap.put("application/bzip2", COMPRESSION_FORMAT_BZIP2);
         mimeTypeMap.put("application/x-lzma", COMPRESSION_FORMAT_LZMA);
-        this.compressionFormatMimeTypeMap = Collections.
-                unmodifiableMap(mimeTypeMap);
+        this.compressionFormatMimeTypeMap = Collections.unmodifiableMap(mimeTypeMap);
     }
 
     @Override
@@ -286,8 +285,7 @@ public class CompressContent extends AbstractProcessor {
 
             final long sizeAfterCompression = flowFile.getSize();
             if (MODE_DECOMPRESS.equalsIgnoreCase(compressionMode)) {
-                flowFile = session.
-                        removeAttribute(flowFile, CoreAttributes.MIME_TYPE.key());
+                flowFile = session.removeAttribute(flowFile, CoreAttributes.MIME_TYPE.key());
 
                 if (context.getProperty(UPDATE_FILENAME).asBoolean()) {
                     final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());
@@ -296,8 +294,7 @@ public class CompressContent extends AbstractProcessor {
                     }
                 }
             } else {
-                flowFile = session.
-                        putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), mimeTypeRef.get());
+                flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), mimeTypeRef.get());
 
                 if (context.getProperty(UPDATE_FILENAME).asBoolean()) {
                     final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
index 10cd45d..47c3cb4 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
@@ -156,10 +156,10 @@ public class ControlRate extends AbstractProcessor {
                 rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
                 final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
                 if (rateAttr == null) {
-                    validationResults.add(new ValidationResult.Builder().
-                            subject(RATE_CONTROL_ATTRIBUTE_NAME.getName()).
-                            explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'").
-                            build());
+                    validationResults.add(new ValidationResult.Builder()
+                            .subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
+                            .explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
+                            .build());
                 }
                 break;
             case FLOWFILE_RATE:
@@ -281,8 +281,7 @@ public class ControlRate extends AbstractProcessor {
         throttle.lock();
         try {
             if (throttle.tryAdd(rateValue)) {
-                logger.
-                        info("transferring {} to 'success'", new Object[]{flowFile});
+                logger.info("transferring {} to 'success'", new Object[]{flowFile});
                 session.transfer(flowFile, REL_SUCCESS);
             } else {
                 flowFile = session.penalize(flowFile);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
index ec61370..a0a1364 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
@@ -16,6 +16,13 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessorInitializationContext;
@@ -34,13 +41,16 @@ import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
-import java.io.*;
 import java.nio.CharBuffer;
 import java.nio.charset.Charset;
 import java.nio.charset.CharsetDecoder;
 import java.nio.charset.CharsetEncoder;
 import java.nio.charset.CodingErrorAction;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 /**
@@ -117,12 +127,8 @@ public class ConvertCharacterSet extends AbstractProcessor {
     public void onTrigger(final ProcessContext context, final ProcessSession session) {
         final ProcessorLog logger = getLogger();
 
-        final Charset inputCharset = Charset.forName(context.
-                getProperty(INPUT_CHARSET).
-                getValue());
-        final Charset outputCharset = Charset.forName(context.
-                getProperty(OUTPUT_CHARSET).
-                getValue());
+        final Charset inputCharset = Charset.forName(context.getProperty(INPUT_CHARSET).getValue());
+        final Charset outputCharset = Charset.forName(context.getProperty(OUTPUT_CHARSET).getValue());
         final CharBuffer charBuffer = CharBuffer.allocate(MAX_BUFFER_SIZE);
 
         final CharsetDecoder decoder = inputCharset.newDecoder();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
index 585ba1d..afff3c4 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
@@ -117,11 +117,11 @@ public class DistributeLoad extends AbstractProcessor {
                 }
             }).build();
     public static final PropertyDescriptor LOAD_DISTRIBUTION_SERVICE_TEMPLATE = new PropertyDescriptor.Builder()
-            .name("Load Distribution Service ID").
-            description("The identifier of the Load Distribution Service").
-            required(true).
-            identifiesControllerService(LoadDistributionService.class).
-            build();
+            .name("Load Distribution Service ID")
+            .description("The identifier of the Load Distribution Service")
+            .required(true)
+            .identifiesControllerService(LoadDistributionService.class)
+            .build();
 
     private List<PropertyDescriptor> properties;
     private final AtomicReference<Set<Relationship>> relationshipsRef = new AtomicReference<>();
@@ -327,8 +327,7 @@ public class DistributeLoad extends AbstractProcessor {
         final List<Relationship> relationshipList = new ArrayList<>();
         for (final Map.Entry<Integer, Integer> entry : weightings.entrySet()) {
             final String relationshipName = String.valueOf(entry.getKey());
-            final Relationship relationship = new Relationship.Builder().
-                    name(relationshipName).build();
+            final Relationship relationship = new Relationship.Builder().name(relationshipName).build();
             for (int i = 0; i < entry.getValue(); i++) {
                 relationshipList.add(relationship);
             }
@@ -386,8 +385,8 @@ public class DistributeLoad extends AbstractProcessor {
     private static interface DistributionStrategy {
 
         /**
-         * @param session session
-         * @param flowFiles flowFile
+         * @param context context
+         * @param flowFile flowFile
          * @return a mapping of FlowFile to Relationship or <code>null</code> if the needed relationships are not available to accept files
          */
         Relationship mapToRelationship(ProcessContext context, FlowFile flowFile);
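
A recurring change in this commit is reflowing fluent builder chains so each call starts its line with the dot (see the LOAD_DISTRIBUTION_SERVICE_TEMPLATE descriptor earlier in this diff) instead of dangling the dot at the end of the previous line. A small self-contained sketch of a descriptor and relationship written in that style; the names and description strings are invented for illustration and are not part of the commit.

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.processor.util.StandardValidators;

    public class BuilderStyleExample {

        // Leading-dot chaining keeps one builder call per line.
        static final PropertyDescriptor EXAMPLE_PROPERTY = new PropertyDescriptor.Builder()
                .name("Example Property")
                .description("Illustrative property, not part of this commit")
                .required(true)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();

        static final Relationship REL_EXAMPLE = new Relationship.Builder()
                .name("example")
                .description("Illustrative relationship, not part of this commit")
                .build();
    }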

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
index 042e4a6..b4ddc16 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
@@ -151,8 +151,7 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
 
     @Override
     protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> results = new ArrayList<>(super.
-                customValidate(context));
+        final List<ValidationResult> results = new ArrayList<>(super.customValidate(context));
 
         final String destination = context.getProperty(DESTINATION).getValue();
         if (DESTINATION_CONTENT.equals(destination)) {
@@ -165,8 +164,8 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
             }
 
             if (jsonPathCount != 1) {
-                results.add(new ValidationResult.Builder().subject("JsonPaths").valid(false).
-                        explanation("Exactly one JsonPath must be set if using destination of " + DESTINATION_CONTENT).build());
+                results.add(new ValidationResult.Builder().subject("JsonPaths").valid(false)
+                        .explanation("Exactly one JsonPath must be set if using destination of " + DESTINATION_CONTENT).build());
             }
         }
 
@@ -185,18 +184,17 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
 
     @Override
     protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
-        return new PropertyDescriptor.Builder().name(propertyDescriptorName).expressionLanguageSupported(false).
-                addValidator(new JsonPathValidator() {
-                    @Override
-                    public void cacheComputedValue(String subject, String input, JsonPath computedJsonPath) {
-                        cachedJsonPathMap.put(input, computedJsonPath);
-                    }
-
-                    @Override
-                    public boolean isStale(String subject, String input) {
-                        return cachedJsonPathMap.get(input) == null;
-                    }
-                }).required(false).dynamic(true).build();
+        return new PropertyDescriptor.Builder().name(propertyDescriptorName).expressionLanguageSupported(false).addValidator(new JsonPathValidator() {
+            @Override
+            public void cacheComputedValue(String subject, String input, JsonPath computedJsonPath) {
+                cachedJsonPathMap.put(input, computedJsonPath);
+            }
+
+            @Override
+            public boolean isStale(String subject, String input) {
+                return cachedJsonPathMap.get(input) == null;
+            }
+        }).required(false).dynamic(true).build();
     }
 
     @Override
@@ -235,8 +233,7 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
 
         final ProcessorLog logger = getLogger();
 
-        String representationOption = processContext.
-                getProperty(NULL_VALUE_DEFAULT_REPRESENTATION).getValue();
+        String representationOption = processContext.getProperty(NULL_VALUE_DEFAULT_REPRESENTATION).getValue();
         final String nullDefaultValue = NULL_REPRESENTATION_MAP.get(representationOption);
 
         /* Build the JsonPath expressions from attributes */
@@ -309,8 +306,7 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
                         @Override
                         public void process(final OutputStream out) throws IOException {
                             try (OutputStream outputStream = new BufferedOutputStream(out)) {
-                                outputStream.write(resultRepresentation.
-                                        getBytes(StandardCharsets.UTF_8));
+                                outputStream.write(resultRepresentation.getBytes(StandardCharsets.UTF_8));
                             }
                         }
                     });

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
index 1ea0748..a24f4d0 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
@@ -165,11 +165,9 @@ public class EvaluateXPath extends AbstractProcessor {
 
     @Override
     protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> results = new ArrayList<>(super.
-                customValidate(context));
+        final List<ValidationResult> results = new ArrayList<>(super.customValidate(context));
 
-        final String destination = context.getProperty(DESTINATION).
-                getValue();
+        final String destination = context.getProperty(DESTINATION).getValue();
         if (DESTINATION_CONTENT.equals(destination)) {
             int xpathCount = 0;
 
@@ -356,8 +354,7 @@ public class EvaluateXPath extends AbstractProcessor {
                             @Override
                             public void process(final OutputStream rawOut) throws IOException {
                                 try (final OutputStream out = new BufferedOutputStream(rawOut)) {
-                                    out.write(resultString.
-                                            getBytes("UTF-8"));
+                                    out.write(resultString.getBytes("UTF-8"));
                                 }
                             }
                         });

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
index c6321ad..5e8f0ba 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
@@ -184,8 +184,7 @@ public class EvaluateXQuery extends AbstractProcessor {
 
     @Override
     protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> results = new ArrayList<>(super.
-                customValidate(context));
+        final List<ValidationResult> results = new ArrayList<>(super.customValidate(context));
 
         final String destination = context.getProperty(DESTINATION).getValue();
         if (DESTINATION_CONTENT.equals(destination)) {
@@ -311,8 +310,7 @@ public class EvaluateXQuery extends AbstractProcessor {
                         }
                     } else { // if (DESTINATION_CONTENT.equals(destination)){
                         if (result.size() == 0) {
-                            logger.
-                                    info("Routing {} to 'unmatched'", new Object[]{flowFile});
+                            logger.info("Routing {} to 'unmatched'", new Object[]{flowFile});
                             session.transfer(flowFile, REL_NO_MATCH);
                             continue flowFileLoop;
                         } else if (result.size() == 1) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
index 7950c9c..424094c 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
@@ -163,8 +163,7 @@ public class ExecuteProcess extends AbstractProcessor {
                     if (inQuotes) {
                         sb.append(c);
                     } else {
-                        final String arg = sb.toString().
-                                trim();
+                        final String arg = sb.toString().trim();
                         if (!arg.isEmpty()) {
                             args.add(arg);
                         }
@@ -377,13 +376,11 @@ public class ExecuteProcess extends AbstractProcessor {
         }
 
         final int exitCode;
-        final long millis = TimeUnit.NANOSECONDS.
-                toMillis(System.nanoTime() - startNanos);
+        final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
         try {
             exitCode = process.waitFor();
         } catch (final InterruptedException ie) {
-            getLogger().
-                    warn("Process was interrupted before finishing");
+            getLogger().warn("Process was interrupted before finishing");
             return;
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
index ddeb51a..63fd55b 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
@@ -121,10 +121,10 @@ import org.apache.nifi.stream.io.StreamUtils;
     @WritesAttribute(attribute = "execution.error", description = "Any error messages returned from executing the command")})
 public class ExecuteStreamCommand extends AbstractProcessor {
 
-    public static final Relationship ORIGINAL_RELATIONSHIP = new Relationship.Builder().
-            name("original").
-            description("FlowFiles that were successfully processed").
-            build();
+    public static final Relationship ORIGINAL_RELATIONSHIP = new Relationship.Builder()
+            .name("original")
+            .description("FlowFiles that were successfully processed")
+            .build();
     public static final Relationship OUTPUT_STREAM_RELATIONSHIP = new Relationship.Builder()
             .name("output stream")
             .description("The destination path for the flow file created from the command's output")
@@ -139,8 +139,8 @@ public class ExecuteStreamCommand extends AbstractProcessor {
     }
 
     private static final Validator ATTRIBUTE_EXPRESSION_LANGUAGE_VALIDATOR = StandardValidators.createAttributeExpressionLanguageValidator(ResultType.STRING, true);
-    static final PropertyDescriptor EXECUTION_COMMAND = new PropertyDescriptor.Builder().
-            name("Command Path")
+    static final PropertyDescriptor EXECUTION_COMMAND = new PropertyDescriptor.Builder()
+            .name("Command Path")
             .description("Specifies the command to be executed; if just the name of an executable is provided, it must be in the user's environment PATH.")
             .expressionLanguageSupported(true)
             .addValidator(ATTRIBUTE_EXPRESSION_LANGUAGE_VALIDATOR)
@@ -158,8 +158,7 @@ public class ExecuteStreamCommand extends AbstractProcessor {
                     .subject(subject).valid(true).input(input).build();
                     String[] args = input.split(";");
                     for (String arg : args) {
-                        ValidationResult valResult = ATTRIBUTE_EXPRESSION_LANGUAGE_VALIDATOR.
-                        validate(subject, arg, context);
+                        ValidationResult valResult = ATTRIBUTE_EXPRESSION_LANGUAGE_VALIDATOR.validate(subject, arg, context);
                         if (!valResult.isValid()) {
                             result = valResult;
                             break;
@@ -255,8 +254,7 @@ public class ExecuteStreamCommand extends AbstractProcessor {
             session.read(flowFile, callback);
             outputStreamFlowFile = callback.outputStreamFlowFile;
             exitCode = callback.exitCode;
-            logger.
-                    debug("Execution complete for command: {}.  Exited with code: {}", new Object[]{executeCommand, exitCode});
+            logger.debug("Execution complete for command: {}.  Exited with code: {}", new Object[]{executeCommand, exitCode});
 
             Map<String, String> attributes = new HashMap<>();
 
@@ -328,8 +326,7 @@ public class ExecuteStreamCommand extends AbstractProcessor {
                             try {
                                 StreamUtils.copy(incomingFlowFileIS, stdInWritable);
                             } catch (IOException e) {
-                                logger.
-                                        error("Failed to write flow file to stdIn due to {}", new Object[]{e}, e);
+                                logger.error("Failed to write flow file to stdIn due to {}", new Object[]{e}, e);
                             }
                             // MUST close the output stream to the stdIn so that whatever is reading knows
                             // there is no more data

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
index d413b1a..1bcd3bf 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
@@ -272,8 +272,7 @@ public class ExtractText extends AbstractProcessor {
         final Map<String, Pattern> patternMap = compiledPattersMapRef.get();
         for (final Map.Entry<String, Pattern> entry : patternMap.entrySet()) {
 
-            final Matcher matcher = entry.getValue().
-                    matcher(contentString);
+            final Matcher matcher = entry.getValue().matcher(contentString);
 
             if (matcher.find()) {
                 final String baseKey = entry.getKey();

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
index ebcca86..aa1206a 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
@@ -149,8 +149,7 @@ public class GenerateFlowFile extends AbstractProcessor {
             data = this.data.get();
         }
 
-        for (int i = 0; i < context.getProperty(BATCH_SIZE).
-                asInteger(); i++) {
+        for (int i = 0; i < context.getProperty(BATCH_SIZE).asInteger(); i++) {
             FlowFile flowFile = session.create();
             if (data.length > 0) {
                 flowFile = session.write(flowFile, new OutputStreamCallback() {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
index bc2fac2..0fa9178 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
@@ -344,8 +344,7 @@ public class GetFile extends AbstractProcessor {
             if (store.supportsFileAttributeView("posix")) {
                 try {
                     PosixFileAttributeView view = Files.getFileAttributeView(file, PosixFileAttributeView.class);
-                    attributes.
-                            put(FILE_PERMISSIONS_ATTRIBUTE, PosixFilePermissions.toString(view.readAttributes().permissions()));
+                    attributes.put(FILE_PERMISSIONS_ATTRIBUTE, PosixFilePermissions.toString(view.readAttributes().permissions()));
                     attributes.put(FILE_GROUP_ATTRIBUTE, view.readAttributes().group().getName());
                 } catch (Exception ignore) {
                 } // allow other attributes if these fail
@@ -425,8 +424,7 @@ public class GetFile extends AbstractProcessor {
 
                 flowFile = session.create();
                 final long importStart = System.nanoTime();
-                flowFile = session.
-                        importFrom(filePath, keepingSourceFile, flowFile);
+                flowFile = session.importFrom(filePath, keepingSourceFile, flowFile);
                 final long importNanos = System.nanoTime() - importStart;
                 final long importMillis = TimeUnit.MILLISECONDS.convert(importNanos, TimeUnit.NANOSECONDS);
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
index 94b49fe..e7209cc 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
@@ -268,8 +268,7 @@ public class GetJMSTopic extends JmsConsumer {
         final String serverUrl = props.getProperty(URL.getName());
         final String username = props.getProperty(USERNAME.getName());
         final String encryptedPassword = props.getProperty(PASSWORD.getName());
-        final String subscriptionName = props.
-                getProperty(SUBSCRIPTION_NAME_PROPERTY);
+        final String subscriptionName = props.getProperty(SUBSCRIPTION_NAME_PROPERTY);
         final String jmsProvider = props.getProperty(JMS_PROVIDER.getName());
 
         final String password = encryptedPassword == null ? null : context.decrypt(encryptedPassword);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
index e7f28be..2583e88 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
@@ -104,8 +104,7 @@ import com.sun.jersey.api.client.ClientResponse.Status;
 public class HandleHttpRequest extends AbstractProcessor {
 
     public static final String HTTP_CONTEXT_ID = "http.context.identifier";
-    private static final Pattern URL_QUERY_PARAM_DELIMITER = Pattern.
-            compile("&");
+    private static final Pattern URL_QUERY_PARAM_DELIMITER = Pattern.compile("&");
 
     // Allowable values for client auth
     public static final AllowableValue CLIENT_NONE = new AllowableValue("No Authentication", "No Authentication",
@@ -174,13 +173,13 @@ public class HandleHttpRequest extends AbstractProcessor {
             .allowableValues("true", "false")
             .defaultValue("true")
             .build();
-    public static final PropertyDescriptor ALLOW_PUT = new PropertyDescriptor.Builder().
-            name("Allow PUT").
-            description("Allow HTTP PUT Method").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("true").
-            build();
+    public static final PropertyDescriptor ALLOW_PUT = new PropertyDescriptor.Builder()
+            .name("Allow PUT")
+            .description("Allow HTTP PUT Method")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .build();
     public static final PropertyDescriptor ALLOW_DELETE = new PropertyDescriptor.Builder()
             .name("Allow DELETE")
             .description("Allow HTTP DELETE Method")

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
index 0201730..6de3fe6 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
@@ -154,8 +154,7 @@ public class HandleHttpResponse extends AbstractProcessor {
             response.flushBuffer();
         } catch (final IOException ioe) {
             session.transfer(flowFile, REL_FAILURE);
-            getLogger().
-                    error("Failed to respond to HTTP request for {} due to {}", new Object[]{flowFile, ioe});
+            getLogger().error("Failed to respond to HTTP request for {} due to {}", new Object[]{flowFile, ioe});
             return;
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
index 314f1c7..b3dbf83 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
@@ -129,8 +129,7 @@ public class HashAttribute extends AbstractProcessor {
 
     private Set<Relationship> relationships;
     private List<PropertyDescriptor> properties;
-    private final AtomicReference<Map<String, Pattern>> regexMapRef = new AtomicReference<>(Collections.
-            <String, Pattern>emptyMap());
+    private final AtomicReference<Map<String, Pattern>> regexMapRef = new AtomicReference<>(Collections.<String, Pattern>emptyMap());
 
     @Override
     protected void init(final ProcessorInitializationContext context) {
@@ -157,12 +156,7 @@ public class HashAttribute extends AbstractProcessor {
     @Override
     protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
         return new PropertyDescriptor.Builder()
-                .name(propertyDescriptorName).
-                addValidator(StandardValidators.
-                        createRegexValidator(0, 1, false)).
-                required(false).
-                dynamic(true).
-                build();
+                .name(propertyDescriptorName).addValidator(StandardValidators.createRegexValidator(0, 1, false)).required(false).dynamic(true).build();
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
index 8f4286b..e9eae83 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
@@ -244,17 +244,16 @@ public final class InvokeHTTP extends AbstractProcessor {
                 .identifiesControllerService(SSLContextService.class)
                 .build();
 
-        public static final List<PropertyDescriptor> PROPERTIES = Collections.
-                unmodifiableList(Arrays.asList(
-                                PROP_METHOD,
-                                PROP_URL,
-                                PROP_SSL_CONTEXT_SERVICE,
-                                PROP_CONNECT_TIMEOUT,
-                                PROP_READ_TIMEOUT,
-                                PROP_DATE_HEADER,
-                                PROP_FOLLOW_REDIRECTS,
-                                PROP_ATTRIBUTES_TO_SEND
-                        ));
+        public static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+                PROP_METHOD,
+                PROP_URL,
+                PROP_SSL_CONTEXT_SERVICE,
+                PROP_CONNECT_TIMEOUT,
+                PROP_READ_TIMEOUT,
+                PROP_DATE_HEADER,
+                PROP_FOLLOW_REDIRECTS,
+                PROP_ATTRIBUTES_TO_SEND
+        ));
 
         // property to allow the hostname verifier to be overridden
         // this is a "hidden" property - it's configured using a dynamic user property
@@ -559,8 +558,7 @@ public final class InvokeHTTP extends AbstractProcessor {
         private Map<String, String> convertAttributesFromHeaders() throws IOException {
             // create a new hashmap to store the values from the connection
             Map<String, String> map = new HashMap<>();
-            for (Map.Entry<String, List<String>> entry : conn.getHeaderFields().
-                    entrySet()) {
+            for (Map.Entry<String, List<String>> entry : conn.getHeaderFields().entrySet()) {
                 String key = entry.getKey();
                 if (key == null) {
                     continue;
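
The loop above relies on java.net.HttpURLConnection.getHeaderFields(), which maps a null key to the HTTP status line and each header name to a list of values. A hedged, stand-alone sketch of turning that map into simple string attributes (flattening to the first value is an assumption made here for brevity, not necessarily what InvokeHTTP does):

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class HeaderMapSketch {
        public static Map<String, String> toAttributes(final HttpURLConnection conn) {
            final Map<String, String> map = new HashMap<>();
            for (final Map.Entry<String, List<String>> entry : conn.getHeaderFields().entrySet()) {
                final String key = entry.getKey();
                if (key == null) {
                    continue; // the null key holds the status line, not a named header
                }
                final List<String> values = entry.getValue();
                if (values != null && !values.isEmpty()) {
                    map.put(key, values.get(0));
                }
            }
            return map;
        }

        public static void main(final String[] args) throws IOException {
            final HttpURLConnection conn = (HttpURLConnection) new URL("http://example.com/").openConnection();
            conn.connect();
            System.out.println(toAttributes(conn));
        }
    }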

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java
index 6b3283c..b53d62f 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JmsConsumer.java
@@ -175,7 +175,8 @@ public abstract class JmsConsumer extends AbstractProcessor {
         }
     }
 
-    public static JmsProcessingSummary map2FlowFile(final ProcessContext context, final ProcessSession session, final Message message, final boolean addAttributes, ProcessorLog logger) throws Exception {
+    public static JmsProcessingSummary map2FlowFile(final ProcessContext context, final ProcessSession session, final Message message, final boolean addAttributes, ProcessorLog logger)
+            throws Exception {
 
         // Currently not very useful, because always one Message == one FlowFile
         final IntegerHolder msgsThisFlowFile = new IntegerHolder(1);
@@ -186,8 +187,7 @@ public abstract class JmsConsumer extends AbstractProcessor {
             if (message instanceof MapMessage) {
                 MapMessage mapMessage = (MapMessage) message;
                 flowFile = session.putAllAttributes(flowFile, createMapMessageValues(mapMessage));
-            } // all other message types, write Message body to FlowFile content 
-            else {
+            } else { // all other message types, write Message body to FlowFile content
                 flowFile = session.write(flowFile, new OutputStreamCallback() {
                     @Override
                     public void process(final OutputStream rawOut) throws IOException {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
index fa17df1..fa60d6b 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
@@ -226,11 +226,11 @@ public class ListenUDP extends AbstractSessionFactoryProcessor {
         } catch (SocketException e) {
         }
     }
-    public static final PropertyDescriptor NETWORK_INTF_NAME = new PropertyDescriptor.Builder().
-            name("Local Network Interface").
-            description("The name of a local network interface to be used to restrict listening for UDP Datagrams to a specific LAN."
-                    + "May be a system property or an environment variable.").
-            addValidator(new Validator() {
+    public static final PropertyDescriptor NETWORK_INTF_NAME = new PropertyDescriptor.Builder()
+            .name("Local Network Interface")
+            .description("The name of a local network interface to be used to restrict listening for UDP Datagrams to a specific LAN."
+                    + "May be a system property or an environment variable.")
+            .addValidator(new Validator() {
                 @Override
                 public ValidationResult validate(String subject, String input, ValidationContext context) {
                     ValidationResult result = new ValidationResult.Builder()
@@ -257,7 +257,8 @@ public class ListenUDP extends AbstractSessionFactoryProcessor {
 
                     return result;
                 }
-            }).expressionLanguageSupported(true).build();
+            })
+            .expressionLanguageSupported(true).build();
 
     static {
         List<PropertyDescriptor> props = new ArrayList<>();
@@ -303,102 +304,100 @@ public class ListenUDP extends AbstractSessionFactoryProcessor {
     /**
      * Create the ChannelListener and a thread that causes the Consumer to create flow files.
      *
-     * @param context
-     * @throws IOException
+     * @param context context
+     * @throws IOException ex
      */
     @OnScheduled
     public void initializeChannelListenerAndConsumerProcessing(final ProcessContext context) throws IOException {
         getChannelListener(context);
         stopping.set(false);
-        Future<Tuple<ProcessSession, List<FlowFile>>> consumerFuture = consumerExecutorService.
-                submit(new Callable<Tuple<ProcessSession, List<FlowFile>>>() {
-
-                    @Override
-                    public Tuple<ProcessSession, List<FlowFile>> call() {
-                        final int maxFlowFilesPerSession = context.getProperty(FLOW_FILES_PER_SESSION).asInteger();
-                        final long channelReaderIntervalMSecs = context.getProperty(CHANNEL_READER_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
-                        // number of waits in 5 secs, or 1
-                        final int maxWaits = (int) (channelReaderIntervalMSecs <= 1000 ? 5000 / channelReaderIntervalMSecs : 1);
-                        final ProcessorLog logger = getLogger();
-                        int flowFileCount = maxFlowFilesPerSession;
-                        ProcessSession session = null;
-                        int numWaits = 0;
-                        while (!stopping.get()) {
-                            UDPStreamConsumer consumer = consumerRef.get();
-                            if (consumer == null || sessionFactoryRef.get() == null) {
-                                try {
-                                    Thread.sleep(100L);
-                                } catch (InterruptedException swallow) {
-                                }
-                            } else {
-                                try {
+        Future<Tuple<ProcessSession, List<FlowFile>>> consumerFuture = consumerExecutorService.submit(new Callable<Tuple<ProcessSession, List<FlowFile>>>() {
+
+            @Override
+            public Tuple<ProcessSession, List<FlowFile>> call() {
+                final int maxFlowFilesPerSession = context.getProperty(FLOW_FILES_PER_SESSION).asInteger();
+                final long channelReaderIntervalMSecs = context.getProperty(CHANNEL_READER_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
+                // number of waits in 5 secs, or 1
+                final int maxWaits = (int) (channelReaderIntervalMSecs <= 1000 ? 5000 / channelReaderIntervalMSecs : 1);
+                final ProcessorLog logger = getLogger();
+                int flowFileCount = maxFlowFilesPerSession;
+                ProcessSession session = null;
+                int numWaits = 0;
+                while (!stopping.get()) {
+                    UDPStreamConsumer consumer = consumerRef.get();
+                    if (consumer == null || sessionFactoryRef.get() == null) {
+                        try {
+                            Thread.sleep(100L);
+                        } catch (InterruptedException swallow) {
+                        }
+                    } else {
+                        try {
                                     // first time through, flowFileCount is maxFlowFilesPerSession so that a session
-                                    // is created and the consumer is updated with it.
-                                    if (flowFileCount == maxFlowFilesPerSession || numWaits == maxWaits) {
-                                        logger.debug("Have waited {} times", new Object[]{numWaits});
-                                        numWaits = 0;
-                                        if (session != null) {
-                                            Tuple<ProcessSession, List<FlowFile>> flowFilesPerSession = new Tuple<ProcessSession, List<FlowFile>>(session, new ArrayList<>(newFlowFiles));
-                                            newFlowFiles.clear();
-                                            flowFilesPerSessionQueue.
-                                            add(flowFilesPerSession);
-                                        }
-                                        session = sessionFactoryRef.get().createSession();
-                                        consumer.setSession(session);
-                                        flowFileCount = 0;
-                                    }
+                            // is created and the consumer is updated with it.
+                            if (flowFileCount == maxFlowFilesPerSession || numWaits == maxWaits) {
+                                logger.debug("Have waited {} times", new Object[]{numWaits});
+                                numWaits = 0;
+                                if (session != null) {
+                                    Tuple<ProcessSession, List<FlowFile>> flowFilesPerSession = new Tuple<ProcessSession, List<FlowFile>>(session, new ArrayList<>(newFlowFiles));
+                                    newFlowFiles.clear();
+                                    flowFilesPerSessionQueue.add(flowFilesPerSession);
+                                }
+                                session = sessionFactoryRef.get().createSession();
+                                consumer.setSession(session);
+                                flowFileCount = 0;
+                            }
                                     // this will throttle the processing of the received datagrams. If there are no more
-                                    // buffers to read into because none have been returned to the pool via consumer.process(),
-                                    // then the desired back pressure on the channel is created.
-                                    if (context.getAvailableRelationships().size() > 0) {
-                                        consumer.process();
-                                        if (flowFileCount == newFlowFiles.size()) {
+                            // buffers to read into because none have been returned to the pool via consumer.process(),
+                            // then the desired back pressure on the channel is created.
+                            if (context.getAvailableRelationships().size() > 0) {
+                                consumer.process();
+                                if (flowFileCount == newFlowFiles.size()) {
                                             // no new datagrams received, need to throttle this thread back so it does
-                                            // not consume all cpu...but don't want to cause back pressure on the channel
-                                            // so the sleep time is same as the reader interval
-                                            // If have done this for approx. 5 secs, assume datagram sender is down. So, push
-                                            // out the remaining flow files (see numWaits == maxWaits above)
-                                            Thread.sleep(channelReaderIntervalMSecs);
-                                            if (flowFileCount > 0) {
-                                                numWaits++;
-                                            }
-                                        } else {
-                                            flowFileCount = newFlowFiles.size();
-                                        }
-                                    } else {
-                                        logger.debug("Creating back pressure...no available destinations");
-                                        Thread.sleep(1000L);
-                                    }
-                                } catch (final IOException ioe) {
-                                    logger.error("Unable to fully process consumer {}", new Object[]{consumer}, ioe);
-                                } catch (InterruptedException e) {
-                                    // don't care
-                                } finally {
-                                    if (consumer.isConsumerFinished()) {
-                                        logger.info("Consumer {} was closed and is finished", new Object[]{consumer});
-                                        consumerRef.set(null);
-                                        disconnect();
-                                        if (!stopping.get()) {
-                                            resetChannelListener.set(true);
-                                        }
+                                    // not consume all cpu...but don't want to cause back pressure on the channel
+                                    // so the sleep time is same as the reader interval
+                                    // If have done this for approx. 5 secs, assume datagram sender is down. So, push
+                                    // out the remaining flow files (see numWaits == maxWaits above)
+                                    Thread.sleep(channelReaderIntervalMSecs);
+                                    if (flowFileCount > 0) {
+                                        numWaits++;
                                     }
+                                } else {
+                                    flowFileCount = newFlowFiles.size();
                                 }
+                            } else {
+                                logger.debug("Creating back pressure...no available destinations");
+                                Thread.sleep(1000L);
                             }
-                        }
-                        // when shutting down, need consumer to drain rest of cached buffers and clean up.
-                        // prior to getting here, the channelListener was shutdown
-                        UDPStreamConsumer consumer;
-                        while ((consumer = consumerRef.get()) != null && !consumer.isConsumerFinished()) {
-                            try {
-                                consumer.process();
-                            } catch (IOException swallow) {
-                                // if this is blown...consumer.isConsumerFinished will be true
+                        } catch (final IOException ioe) {
+                            logger.error("Unable to fully process consumer {}", new Object[]{consumer}, ioe);
+                        } catch (InterruptedException e) {
+                            // don't care
+                        } finally {
+                            if (consumer.isConsumerFinished()) {
+                                logger.info("Consumer {} was closed and is finished", new Object[]{consumer});
+                                consumerRef.set(null);
+                                disconnect();
+                                if (!stopping.get()) {
+                                    resetChannelListener.set(true);
+                                }
                             }
                         }
-                        Tuple<ProcessSession, List<FlowFile>> flowFilesPerSession = new Tuple<ProcessSession, List<FlowFile>>(session, new ArrayList<>(newFlowFiles));
-                        return flowFilesPerSession;
                     }
-                });
+                }
+                        // when shutting down, need consumer to drain rest of cached buffers and clean up.
+                // prior to getting here, the channelListener was shutdown
+                UDPStreamConsumer consumer;
+                while ((consumer = consumerRef.get()) != null && !consumer.isConsumerFinished()) {
+                    try {
+                        consumer.process();
+                    } catch (IOException swallow) {
+                        // if this is blown...consumer.isConsumerFinished will be true
+                    }
+                }
+                Tuple<ProcessSession, List<FlowFile>> flowFilesPerSession = new Tuple<ProcessSession, List<FlowFile>>(session, new ArrayList<>(newFlowFiles));
+                return flowFilesPerSession;
+            }
+        });
         consumerFutureRef.set(consumerFuture);
     }
 
@@ -434,8 +433,7 @@ public class ListenUDP extends AbstractSessionFactoryProcessor {
 
                     @Override
                     public StreamConsumer newInstance(final String streamId) {
-                        final UDPStreamConsumer consumer = new UDPStreamConsumer(streamId, newFlowFiles, flowFileSizeTrigger.
-                                intValue(), getLogger());
+                        final UDPStreamConsumer consumer = new UDPStreamConsumer(streamId, newFlowFiles, flowFileSizeTrigger.intValue(), getLogger());
                         consumerRef.set(consumer);
                         return consumer;
                     }
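
The restructured block above is, at its core, one long-running Callable submitted to an ExecutorService at @OnScheduled time, looping on a stop flag and handing its final result back through the retained Future. A hedged reduction of that wiring with all NiFi and UDP details stripped out (every name here is illustrative):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.atomic.AtomicBoolean;

    public class ConsumerLoopSketch {
        private final ExecutorService executor = Executors.newSingleThreadExecutor();
        private final AtomicBoolean stopping = new AtomicBoolean(false);
        private Future<List<String>> consumerFuture;

        public void start() {
            consumerFuture = executor.submit(new Callable<List<String>>() {
                @Override
                public List<String> call() throws Exception {
                    final List<String> produced = new ArrayList<>();
                    while (!stopping.get()) {
                        // poll a channel or queue here; back off briefly when nothing arrives
                        Thread.sleep(100L);
                    }
                    return produced; // drained results are handed back through the Future
                }
            });
        }

        public List<String> stop() throws Exception {
            stopping.set(true);
            try {
                return consumerFuture.get(); // blocks until the loop has exited
            } finally {
                executor.shutdown();
            }
        }
    }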

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
index bd639dd..b11dee3 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
@@ -327,8 +327,7 @@ public class MergeContent extends BinFiles {
     protected boolean processBin(final Bin unmodifiableBin, final List<FlowFileSessionWrapper> binCopy, final ProcessContext context,
             final ProcessSession session) throws ProcessException {
 
-        final String mergeFormat = context.getProperty(MERGE_FORMAT).
-                getValue();
+        final String mergeFormat = context.getProperty(MERGE_FORMAT).getValue();
         MergeBin merger;
         switch (mergeFormat) {
             case MERGE_FORMAT_TAR_VALUE:
@@ -458,8 +457,7 @@ public class MergeContent extends BinFiles {
             return false;
         }
 
-        return NUMBER_PATTERN.matcher(value).
-                matches();
+        return NUMBER_PATTERN.matcher(value).matches();
     }
 
     private class BinaryConcatenationMerge implements MergeBin {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
index 6b906c2..9509790 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
@@ -418,9 +418,8 @@ public class PostHTTP extends AbstractProcessor {
             try {
                 new java.net.URL(url);
             } catch (final MalformedURLException e) {
-                logger.
-                        error("After substituting attribute values for {}, URL is {}; this is not a valid URL, so routing to failure",
-                                new Object[]{flowFile, url});
+                logger.error("After substituting attribute values for {}, URL is {}; this is not a valid URL, so routing to failure",
+                        new Object[]{flowFile, url});
                 flowFile = session.penalize(flowFile);
                 session.transfer(flowFile, REL_FAILURE);
                 continue;
@@ -442,29 +441,28 @@ public class PostHTTP extends AbstractProcessor {
                 final HttpClientBuilder clientBuilder = HttpClientBuilder.create();
                 clientBuilder.setConnectionManager(conMan);
                 clientBuilder.setUserAgent(userAgent);
-                clientBuilder.
-                        addInterceptorFirst(new HttpResponseInterceptor() {
-                            @Override
-                            public void process(final HttpResponse response, final HttpContext httpContext) throws HttpException, IOException {
-                                HttpCoreContext coreContext = HttpCoreContext.adapt(httpContext);
-                                ManagedHttpClientConnection conn = coreContext.getConnection(ManagedHttpClientConnection.class);
-                                if (!conn.isOpen()) {
-                                    return;
-                                }
-
-                                SSLSession sslSession = conn.getSSLSession();
+                clientBuilder.addInterceptorFirst(new HttpResponseInterceptor() {
+                    @Override
+                    public void process(final HttpResponse response, final HttpContext httpContext) throws HttpException, IOException {
+                        HttpCoreContext coreContext = HttpCoreContext.adapt(httpContext);
+                        ManagedHttpClientConnection conn = coreContext.getConnection(ManagedHttpClientConnection.class);
+                        if (!conn.isOpen()) {
+                            return;
+                        }
 
-                                if (sslSession != null) {
-                                    final X509Certificate[] certChain = sslSession.getPeerCertificateChain();
-                                    if (certChain == null || certChain.length == 0) {
-                                        throw new SSLPeerUnverifiedException("No certificates found");
-                                    }
+                        SSLSession sslSession = conn.getSSLSession();
 
-                                    final X509Certificate cert = certChain[0];
-                                    dnHolder.set(cert.getSubjectDN().getName().trim());
-                                }
+                        if (sslSession != null) {
+                            final X509Certificate[] certChain = sslSession.getPeerCertificateChain();
+                            if (certChain == null || certChain.length == 0) {
+                                throw new SSLPeerUnverifiedException("No certificates found");
                             }
-                        });
+
+                            final X509Certificate cert = certChain[0];
+                            dnHolder.set(cert.getSubjectDN().getName().trim());
+                        }
+                    }
+                });
 
                 clientBuilder.disableAutomaticRetries();
                 clientBuilder.disableContentCompression();
@@ -783,8 +781,7 @@ public class PostHTTP extends AbstractProcessor {
 
             if (!isScheduled()) {
                 context.yield();
-                logger.
-                        warn("Failed to delete Hold that destination placed on {}; Processor has been stopped so routing FlowFile(s) to failure", new Object[]{flowFileDescription});
+                logger.warn("Failed to delete Hold that destination placed on {}; Processor has been stopped so routing FlowFile(s) to failure", new Object[]{flowFileDescription});
                 for (FlowFile flowFile : toSend) {
                     flowFile = session.penalize(flowFile);
                     session.transfer(flowFile, REL_FAILURE);
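
The interceptor registered above captures the TLS peer's subject DN after each response. Pulled out of PostHTTP and reduced to just that wiring, the pattern looks roughly like this (pooling, retries, and the rest of the client configuration are omitted; this is a sketch, not the processor's full setup):

    import java.io.IOException;
    import java.util.concurrent.atomic.AtomicReference;

    import javax.net.ssl.SSLPeerUnverifiedException;
    import javax.net.ssl.SSLSession;
    import javax.security.cert.X509Certificate;

    import org.apache.http.HttpException;
    import org.apache.http.HttpResponse;
    import org.apache.http.HttpResponseInterceptor;
    import org.apache.http.conn.ManagedHttpClientConnection;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClientBuilder;
    import org.apache.http.protocol.HttpContext;
    import org.apache.http.protocol.HttpCoreContext;

    public class PeerDnCaptureSketch {
        public static CloseableHttpClient buildClient(final AtomicReference<String> dnHolder) {
            final HttpClientBuilder clientBuilder = HttpClientBuilder.create();
            clientBuilder.addInterceptorFirst(new HttpResponseInterceptor() {
                @Override
                public void process(final HttpResponse response, final HttpContext httpContext) throws HttpException, IOException {
                    final HttpCoreContext coreContext = HttpCoreContext.adapt(httpContext);
                    final ManagedHttpClientConnection conn = coreContext.getConnection(ManagedHttpClientConnection.class);
                    if (!conn.isOpen()) {
                        return; // nothing to inspect on a closed connection
                    }
                    final SSLSession sslSession = conn.getSSLSession();
                    if (sslSession != null) {
                        final X509Certificate[] certChain = sslSession.getPeerCertificateChain();
                        if (certChain == null || certChain.length == 0) {
                            throw new SSLPeerUnverifiedException("No certificates found");
                        }
                        dnHolder.set(certChain[0].getSubjectDN().getName().trim());
                    }
                }
            });
            return clientBuilder.build();
        }
    }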

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
index 8efc563..7e2dd31 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
@@ -274,8 +274,7 @@ public class PutEmail extends AbstractProcessor {
         final String bcc = context.getProperty(BCC).getValue();
 
         if (to == null && cc == null && bcc == null) {
-            errors.add(new ValidationResult.Builder().subject("To, CC, BCC").
-                    valid(false).explanation("Must specify at least one To/CC/BCC address").build());
+            errors.add(new ValidationResult.Builder().subject("To, CC, BCC").valid(false).explanation("Must specify at least one To/CC/BCC address").build());
         }
 
         return errors;
@@ -390,8 +389,7 @@ public class PutEmail extends AbstractProcessor {
 
         final ProcessorLog logger = this.getLogger();
 
-        for (Entry<String, PropertyDescriptor> entry : propertyToContext.
-                entrySet()) {
+        for (Entry<String, PropertyDescriptor> entry : propertyToContext.entrySet()) {
 
             // Evaluate the property descriptor against the flow file
             String flowFileValue = context.getProperty(entry.getValue()).evaluateAttributeExpressions(flowFile).getValue();
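
The loop above follows NiFi's usual pattern for per-FlowFile configuration: each PropertyDescriptor is evaluated with Expression Language against the FlowFile and the resulting string is applied. A hedged sketch of that shape (the propertyToContext map and the header target are assumptions used only for illustration):

    import java.util.Map;

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessContext;

    final class MessageHeaderSketch {
        static void applyHeaders(final ProcessContext context, final FlowFile flowFile,
                final Map<String, PropertyDescriptor> propertyToContext, final Map<String, String> headers) {
            for (final Map.Entry<String, PropertyDescriptor> entry : propertyToContext.entrySet()) {
                // evaluate the descriptor's value with the FlowFile's attributes in scope
                final String value = context.getProperty(entry.getValue())
                        .evaluateAttributeExpressions(flowFile).getValue();
                if (value != null && !value.isEmpty()) {
                    headers.put(entry.getKey(), value);
                }
            }
        }
    }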

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
index 051cb07..b959efa 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
@@ -57,10 +57,8 @@ import org.apache.nifi.processors.standard.util.FTPTransfer;
             + " you leave off the .")})
 public class PutFTP extends PutFileTransfer<FTPTransfer> {
 
-    private static final Pattern PRE_SEND_CMD_PATTERN = Pattern.
-            compile("^pre\\.cmd\\.(\\d+)$");
-    private static final Pattern POST_SEND_CMD_PATTERN = Pattern.
-            compile("^post\\.cmd\\.(\\d+)$");
+    private static final Pattern PRE_SEND_CMD_PATTERN = Pattern.compile("^pre\\.cmd\\.(\\d+)$");
+    private static final Pattern POST_SEND_CMD_PATTERN = Pattern.compile("^post\\.cmd\\.(\\d+)$");
 
     private final AtomicReference<List<PropertyDescriptor>> preSendDescriptorRef = new AtomicReference<>();
     private final AtomicReference<List<PropertyDescriptor>> postSendDescriptorRef = new AtomicReference<>();
@@ -109,8 +107,7 @@ public class PutFTP extends PutFileTransfer<FTPTransfer> {
 
     @Override
     protected void afterPut(final FlowFile flowFile, final ProcessContext context, final FTPTransfer transfer) throws IOException {
-        transfer.
-                sendCommands(getCommands(postSendDescriptorRef.get(), context, flowFile), flowFile);
+        transfer.sendCommands(getCommands(postSendDescriptorRef.get(), context, flowFile), flowFile);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
index b60d07f..c03f755 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFileTransfer.java
@@ -44,7 +44,7 @@ import java.util.concurrent.TimeUnit;
 /**
  * Base class for PutFTP & PutSFTP
  *
- * @param <T>
+ * @param <T> type of transfer
  */
 public abstract class PutFileTransfer<T extends FileTransfer> extends AbstractProcessor {
 
@@ -181,7 +181,14 @@ public abstract class PutFileTransfer<T extends FileTransfer> extends AbstractPr
     }
 
     //Attempts to identify naming or content issues with files before they are transferred.
-    private ConflictResult identifyAndResolveConflictFile(final String conflictResolutionType, final T transfer, final String path, final FlowFile flowFile, final boolean rejectZeroByteFiles, final ProcessorLog logger) throws IOException {
+    private ConflictResult identifyAndResolveConflictFile(
+            final String conflictResolutionType,
+            final T transfer,
+            final String path,
+            final FlowFile flowFile,
+            final boolean rejectZeroByteFiles,
+            final ProcessorLog logger)
+            throws IOException {
         Relationship destinationRelationship = REL_SUCCESS;
         String fileName = flowFile.getAttribute(CoreAttributes.FILENAME.key());
         boolean transferFile = true;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
index 65bbb36..034a3fc 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
@@ -336,8 +336,7 @@ public class PutJMS extends AbstractProcessor {
             final String key = entry.getKey();
             final String value = entry.getValue();
 
-            if (key.toLowerCase().
-                    startsWith(ATTRIBUTE_PREFIX.toLowerCase()) && !key.toLowerCase().endsWith(ATTRIBUTE_TYPE_SUFFIX.toLowerCase())) {
+            if (key.toLowerCase().startsWith(ATTRIBUTE_PREFIX.toLowerCase()) && !key.toLowerCase().endsWith(ATTRIBUTE_TYPE_SUFFIX.toLowerCase())) {
 
                 final String jmsPropName = key.substring(ATTRIBUTE_PREFIX.length());
                 final String type = attributes.get(key + ATTRIBUTE_TYPE_SUFFIX);
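
The condition joined onto one line above encodes a naming convention: attributes that start with a prefix become JMS properties, while companion attributes ending in a type suffix only describe the type of their base attribute. Reduced to plain maps, with the constant values assumed for illustration:

    import java.util.HashMap;
    import java.util.Map;

    public class JmsPropertyNamingSketch {
        static final String ATTRIBUTE_PREFIX = "jms.";        // assumed value, for illustration only
        static final String ATTRIBUTE_TYPE_SUFFIX = ".type";  // assumed value, for illustration only

        public static Map<String, String> selectJmsProperties(final Map<String, String> attributes) {
            final Map<String, String> jmsProps = new HashMap<>();
            for (final Map.Entry<String, String> entry : attributes.entrySet()) {
                final String key = entry.getKey();
                if (key.toLowerCase().startsWith(ATTRIBUTE_PREFIX.toLowerCase())
                        && !key.toLowerCase().endsWith(ATTRIBUTE_TYPE_SUFFIX.toLowerCase())) {
                    // strip the prefix; the type-suffix companion attribute is consulted separately
                    jmsProps.put(key.substring(ATTRIBUTE_PREFIX.length()), entry.getValue());
                }
            }
            return jmsProps;
        }
    }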

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/6a706458/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
index 395ddee..97fe7e5 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
@@ -76,12 +76,10 @@ public class PutSFTP extends PutFileTransfer<SFTPTransfer> {
 
     @Override
     protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(String propertyDescriptorName) {
-        if (SFTPTransfer.DISABLE_DIRECTORY_LISTING.getName().
-                equalsIgnoreCase(propertyDescriptorName)) {
+        if (SFTPTransfer.DISABLE_DIRECTORY_LISTING.getName().equalsIgnoreCase(propertyDescriptorName)) {
             return SFTPTransfer.DISABLE_DIRECTORY_LISTING;
         }
-        return super.
-                getSupportedDynamicPropertyDescriptor(propertyDescriptorName);
+        return super.getSupportedDynamicPropertyDescriptor(propertyDescriptorName);
     }
 
     @Override


[21/50] [abbrv] incubator-nifi git commit: NIFI-527: Code cleanup

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/expiration/ExpirationAction.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/expiration/ExpirationAction.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/expiration/ExpirationAction.java
index 8c266d1..0ffa5e6 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/expiration/ExpirationAction.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/expiration/ExpirationAction.java
@@ -25,9 +25,9 @@ public interface ExpirationAction {
      * Performs some action against the given File and returns the new File that
      * contains the modified version
      *
-     * @param expiredFile
-     * @return
-     * @throws IOException
+     * @param expiredFile the file that was expired
+     * @return the new file after the file has been renamed, or the expiredFile if the file was not renamed
+     * @throws IOException if there was an IO problem
      */
     File execute(File expiredFile) throws IOException;
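
With the contract now spelled out, an implementation of execute is expected to hand back whichever File represents the record afterwards. A purely hypothetical action that renames the expired file (this class does not exist in NiFi; it only illustrates the documented return convention):

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    public class RenameOnExpireSketch {
        public File execute(final File expiredFile) throws IOException {
            final File renamed = new File(expiredFile.getParentFile(), expiredFile.getName() + ".expired");
            Files.move(expiredFile.toPath(), renamed.toPath());
            return renamed; // callers keep working with the returned File, per the Javadoc above
        }
    }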
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DeleteIndexAction.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DeleteIndexAction.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DeleteIndexAction.java
index 7db04aa..70bf36e 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DeleteIndexAction.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DeleteIndexAction.java
@@ -49,9 +49,9 @@ public class DeleteIndexAction implements ExpirationAction {
         long numDeleted = 0;
         long maxEventId = -1L;
         try (final RecordReader reader = RecordReaders.newRecordReader(expiredFile, repository.getAllLogFiles())) {
-        	maxEventId = reader.getMaxEventId();
+            maxEventId = reader.getMaxEventId();
         } catch (final IOException ioe) {
-        	logger.warn("Failed to obtain max ID present in journal file {}", expiredFile.getAbsolutePath());
+            logger.warn("Failed to obtain max ID present in journal file {}", expiredFile.getAbsolutePath());
         }
 
         // remove the records from the index
@@ -68,19 +68,19 @@ public class DeleteIndexAction implements ExpirationAction {
                 deleteDir = (docsLeft <= 0);
                 logger.debug("After expiring {}, there are {} docs left for index {}", expiredFile, docsLeft, indexingDirectory);
             } finally {
-            	indexManager.returnIndexWriter(indexingDirectory, writer);
+                indexManager.returnIndexWriter(indexingDirectory, writer);
             }
 
             // we've confirmed that all documents have been removed. Delete the index directory.
             if (deleteDir) {
-            	indexManager.removeIndex(indexingDirectory);
+                indexManager.removeIndex(indexingDirectory);
                 indexConfiguration.removeIndexDirectory(indexingDirectory);
-                
+
                 deleteDirectory(indexingDirectory);
                 logger.info("Removed empty index directory {}", indexingDirectory);
             }
         }
-        
+
         // Update the minimum index to 1 more than the max Event ID in this file.
         if (maxEventId > -1L) {
             indexConfiguration.setMinIdIndexed(maxEventId + 1L);
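
The returnIndexWriter call above sits in a finally block because IndexManager (next diff) hands writers out by reference count: the last caller to return one actually closes it. A hedged, Lucene-free reduction of that counting scheme, with a placeholder Object standing in for the writer:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.locks.Lock;
    import java.util.concurrent.locks.ReentrantLock;

    public class RefCountedPoolSketch<K> {
        private static final class Entry {
            Object resource;
            int count;
        }

        private final Lock lock = new ReentrantLock();
        private final Map<K, Entry> entries = new HashMap<>();

        public Object borrow(final K key) {
            lock.lock();
            try {
                Entry e = entries.get(key);
                if (e == null) {
                    e = new Entry();
                    e.resource = new Object(); // stand-in for opening a real writer
                    entries.put(key, e);
                }
                e.count++; // one more active borrower
                return e.resource;
            } finally {
                lock.unlock();
            }
        }

        public void giveBack(final K key) {
            lock.lock();
            try {
                final Entry e = entries.get(key);
                if (e != null && --e.count <= 0) {
                    entries.remove(key); // last borrower returned it; release for real
                }
            } finally {
                lock.unlock();
            }
        }
    }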

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DocsReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DocsReader.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DocsReader.java
index 5a77f42..98137fb 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DocsReader.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/DocsReader.java
@@ -45,12 +45,13 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class DocsReader {
-	private final Logger logger = LoggerFactory.getLogger(DocsReader.class);
-	
+    private final Logger logger = LoggerFactory.getLogger(DocsReader.class);
+
     public DocsReader(final List<File> storageDirectories) {
     }
 
-    public Set<ProvenanceEventRecord> read(final TopDocs topDocs, final IndexReader indexReader, final Collection<Path> allProvenanceLogFiles, final AtomicInteger retrievalCount, final int maxResults) throws IOException {
+    public Set<ProvenanceEventRecord> read(final TopDocs topDocs, final IndexReader indexReader, final Collection<Path> allProvenanceLogFiles,
+            final AtomicInteger retrievalCount, final int maxResults) throws IOException {
         if (retrievalCount.get() >= maxResults) {
             return Collections.emptySet();
         }
@@ -73,42 +74,42 @@ public class DocsReader {
         return read(docs, allProvenanceLogFiles);
     }
 
-    
+
     private long getByteOffset(final Document d, final RecordReader reader) {
         final IndexableField blockField = d.getField(FieldNames.BLOCK_INDEX);
         if ( blockField != null ) {
-        	final int blockIndex = blockField.numericValue().intValue();
-        	final TocReader tocReader = reader.getTocReader();
-        	return tocReader.getBlockOffset(blockIndex);
+            final int blockIndex = blockField.numericValue().intValue();
+            final TocReader tocReader = reader.getTocReader();
+            return tocReader.getBlockOffset(blockIndex);
         }
-        
-    	return d.getField(FieldNames.STORAGE_FILE_OFFSET).numericValue().longValue();
+
+        return d.getField(FieldNames.STORAGE_FILE_OFFSET).numericValue().longValue();
     }
-    
-    
+
+
     private ProvenanceEventRecord getRecord(final Document d, final RecordReader reader) throws IOException {
-    	IndexableField blockField = d.getField(FieldNames.BLOCK_INDEX);
-    	if ( blockField == null ) {
-    		reader.skipTo(getByteOffset(d, reader));
-    	} else {
-    		reader.skipToBlock(blockField.numericValue().intValue());
-    	}
-    	
+        IndexableField blockField = d.getField(FieldNames.BLOCK_INDEX);
+        if ( blockField == null ) {
+            reader.skipTo(getByteOffset(d, reader));
+        } else {
+            reader.skipToBlock(blockField.numericValue().intValue());
+        }
+
         StandardProvenanceEventRecord record;
         while ( (record = reader.nextRecord()) != null) {
-        	IndexableField idField = d.getField(SearchableFields.Identifier.getSearchableFieldName());
-        	if ( idField == null || idField.numericValue().longValue() == record.getEventId() ) {
-        		break;
-        	}
+            IndexableField idField = d.getField(SearchableFields.Identifier.getSearchableFieldName());
+            if ( idField == null || idField.numericValue().longValue() == record.getEventId() ) {
+                break;
+            }
         }
-        
+
         if ( record == null ) {
-        	throw new IOException("Failed to find Provenance Event " + d);
+            throw new IOException("Failed to find Provenance Event " + d);
         } else {
-        	return record;
+            return record;
         }
     }
-    
+
 
     public Set<ProvenanceEventRecord> read(final List<Document> docs, final Collection<Path> allProvenanceLogFiles) throws IOException {
         LuceneUtil.sortDocsForRetrieval(docs);
@@ -119,23 +120,23 @@ public class DocsReader {
 
         final long start = System.nanoTime();
         int logFileCount = 0;
-        
+
         final Set<String> storageFilesToSkip = new HashSet<>();
-        
+
         try {
             for (final Document d : docs) {
                 final String storageFilename = d.getField(FieldNames.STORAGE_FILENAME).stringValue();
                 if ( storageFilesToSkip.contains(storageFilename) ) {
-                	continue;
+                    continue;
                 }
-                
+
                 try {
                     if (reader != null && storageFilename.equals(lastStorageFilename)) {
-                       	matchingRecords.add(getRecord(d, reader));
+                        matchingRecords.add(getRecord(d, reader));
                     } else {
-                    	logger.debug("Opening log file {}", storageFilename);
-                    	
-                    	logFileCount++;
+                        logger.debug("Opening log file {}", storageFilename);
+
+                        logFileCount++;
                         if (reader != null) {
                             reader.close();
                         }
@@ -143,20 +144,20 @@ public class DocsReader {
                         List<File> potentialFiles = LuceneUtil.getProvenanceLogFiles(storageFilename, allProvenanceLogFiles);
                         if (potentialFiles.isEmpty()) {
                             logger.warn("Could not find Provenance Log File with basename {} in the "
-                            		+ "Provenance Repository; assuming file has expired and continuing without it", storageFilename);
+                                    + "Provenance Repository; assuming file has expired and continuing without it", storageFilename);
                             storageFilesToSkip.add(storageFilename);
                             continue;
                         }
 
                         if (potentialFiles.size() > 1) {
-                            throw new FileNotFoundException("Found multiple Provenance Log Files with basename " + 
-                            		storageFilename + " in the Provenance Repository");
+                            throw new FileNotFoundException("Found multiple Provenance Log Files with basename " +
+                                    storageFilename + " in the Provenance Repository");
                         }
 
                         for (final File file : potentialFiles) {
                             try {
-                            	reader = RecordReaders.newRecordReader(file, allProvenanceLogFiles);
-                               	matchingRecords.add(getRecord(d, reader));
+                                reader = RecordReaders.newRecordReader(file, allProvenanceLogFiles);
+                                matchingRecords.add(getRecord(d, reader));
                             } catch (final IOException e) {
                                 throw new IOException("Failed to retrieve record " + d + " from Provenance File " + file + " due to " + e, e);
                             }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexManager.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexManager.java
index 3943504..9c3ec31 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexManager.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexManager.java
@@ -41,65 +41,65 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class IndexManager implements Closeable {
-	private static final Logger logger = LoggerFactory.getLogger(IndexManager.class);
-	
-	private final Lock lock = new ReentrantLock();
-	private final Map<File, IndexWriterCount> writerCounts = new HashMap<>();
-	private final Map<File, List<ActiveIndexSearcher>> activeSearchers = new HashMap<>();
-	
-	
-	public void removeIndex(final File indexDirectory) {
-		final File absoluteFile = indexDirectory.getAbsoluteFile();
-		logger.info("Removing index {}", indexDirectory);
-		
-		lock.lock();
-		try {
-			final IndexWriterCount count = writerCounts.remove(absoluteFile);
-			if ( count != null ) {
-				try {
-					count.close();
-				} catch (final IOException ioe) {
-					logger.warn("Failed to close Index Writer {} for {}", count.getWriter(), absoluteFile);
-					if ( logger.isDebugEnabled() ) {
-						logger.warn("", ioe);
-					}
-				}
-			}
-			
-			for ( final List<ActiveIndexSearcher> searcherList : activeSearchers.values() ) {
-				for ( final ActiveIndexSearcher searcher : searcherList ) {
-					try {
-						searcher.close();
-					} catch (final IOException ioe) {
-						logger.warn("Failed to close Index Searcher {} for {} due to {}", 
-								searcher.getSearcher(), absoluteFile, ioe);
-						if ( logger.isDebugEnabled() ) {
-							logger.warn("", ioe);
-						}
-					}
-				}
-			}
-		} finally {
-			lock.unlock();
-		}
-	}
-	
-	public IndexWriter borrowIndexWriter(final File indexingDirectory) throws IOException {
-		final File absoluteFile = indexingDirectory.getAbsoluteFile();
-		logger.debug("Borrowing index writer for {}", indexingDirectory);
-		
-		lock.lock();
-		try {
-			IndexWriterCount writerCount = writerCounts.remove(absoluteFile);
-			if ( writerCount == null ) {
-				final List<Closeable> closeables = new ArrayList<>();
+    private static final Logger logger = LoggerFactory.getLogger(IndexManager.class);
+
+    private final Lock lock = new ReentrantLock();
+    private final Map<File, IndexWriterCount> writerCounts = new HashMap<>();
+    private final Map<File, List<ActiveIndexSearcher>> activeSearchers = new HashMap<>();
+
+
+    public void removeIndex(final File indexDirectory) {
+        final File absoluteFile = indexDirectory.getAbsoluteFile();
+        logger.info("Removing index {}", indexDirectory);
+
+        lock.lock();
+        try {
+            final IndexWriterCount count = writerCounts.remove(absoluteFile);
+            if ( count != null ) {
+                try {
+                    count.close();
+                } catch (final IOException ioe) {
+                    logger.warn("Failed to close Index Writer {} for {}", count.getWriter(), absoluteFile);
+                    if ( logger.isDebugEnabled() ) {
+                        logger.warn("", ioe);
+                    }
+                }
+            }
+
+            for ( final List<ActiveIndexSearcher> searcherList : activeSearchers.values() ) {
+                for ( final ActiveIndexSearcher searcher : searcherList ) {
+                    try {
+                        searcher.close();
+                    } catch (final IOException ioe) {
+                        logger.warn("Failed to close Index Searcher {} for {} due to {}",
+                                searcher.getSearcher(), absoluteFile, ioe);
+                        if ( logger.isDebugEnabled() ) {
+                            logger.warn("", ioe);
+                        }
+                    }
+                }
+            }
+        } finally {
+            lock.unlock();
+        }
+    }
+
+    public IndexWriter borrowIndexWriter(final File indexingDirectory) throws IOException {
+        final File absoluteFile = indexingDirectory.getAbsoluteFile();
+        logger.debug("Borrowing index writer for {}", indexingDirectory);
+
+        lock.lock();
+        try {
+            IndexWriterCount writerCount = writerCounts.remove(absoluteFile);
+            if ( writerCount == null ) {
+                final List<Closeable> closeables = new ArrayList<>();
                 final Directory directory = FSDirectory.open(indexingDirectory);
                 closeables.add(directory);
-                
+
                 try {
-                	final Analyzer analyzer = new StandardAnalyzer();
-                	closeables.add(analyzer);
-                	
+                    final Analyzer analyzer = new StandardAnalyzer();
+                    closeables.add(analyzer);
+
                     final IndexWriterConfig config = new IndexWriterConfig(LuceneUtil.LUCENE_VERSION, analyzer);
                     config.setWriteLockTimeout(300000L);
 
@@ -107,361 +107,361 @@ public class IndexManager implements Closeable {
                     writerCount = new IndexWriterCount(indexWriter, analyzer, directory, 1);
                     logger.debug("Providing new index writer for {}", indexingDirectory);
                 } catch (final IOException ioe) {
-                	for ( final Closeable closeable : closeables ) {
-                		try {
-                			closeable.close();
-                		} catch (final IOException ioe2) {
-                			ioe.addSuppressed(ioe2);
-                		}
-                	}
-                	
-                	throw ioe;
+                    for ( final Closeable closeable : closeables ) {
+                        try {
+                            closeable.close();
+                        } catch (final IOException ioe2) {
+                            ioe.addSuppressed(ioe2);
+                        }
+                    }
+
+                    throw ioe;
                 }
-                
+
                 writerCounts.put(absoluteFile, writerCount);
-			} else {
-				logger.debug("Providing existing index writer for {} and incrementing count to {}", indexingDirectory, writerCount.getCount() + 1);
-				writerCounts.put(absoluteFile, new IndexWriterCount(writerCount.getWriter(),
-						writerCount.getAnalyzer(), writerCount.getDirectory(), writerCount.getCount() + 1));
-			}
-			
-			return writerCount.getWriter();
-		} finally {
-			lock.unlock();
-		}
-	}
-	
-	public void returnIndexWriter(final File indexingDirectory, final IndexWriter writer) {
-		final File absoluteFile = indexingDirectory.getAbsoluteFile();
-		logger.debug("Returning Index Writer for {} to IndexManager", indexingDirectory);
-		
-		lock.lock();
-		try {
-			IndexWriterCount count = writerCounts.remove(absoluteFile);
-			
-			try {
-				if ( count == null ) {
-					logger.warn("Index Writer {} was returned to IndexManager for {}, but this writer is not known. "
-							+ "This could potentially lead to a resource leak", writer, indexingDirectory);
-					writer.close();
-				} else if ( count.getCount() <= 1 ) {
-					// we are finished with this writer.
-					logger.debug("Closing Index Writer for {}", indexingDirectory);
-					count.close();
-				} else {
-					// decrement the count.
-					logger.debug("Decrementing count for Index Writer for {} to {}", indexingDirectory, count.getCount() - 1);
-					writerCounts.put(absoluteFile, new IndexWriterCount(count.getWriter(), count.getAnalyzer(), count.getDirectory(), count.getCount() - 1));
-				}
-			} catch (final IOException ioe) {
-				logger.warn("Failed to close Index Writer {} due to {}", writer, ioe);
-				if ( logger.isDebugEnabled() ) {
-					logger.warn("", ioe);
-				}
-			}
-		} finally {
-			lock.unlock();
-		}
-	}
-
-	
-	public IndexSearcher borrowIndexSearcher(final File indexDir) throws IOException {
-		final File absoluteFile = indexDir.getAbsoluteFile();
-		logger.debug("Borrowing index searcher for {}", indexDir);
-		
-		lock.lock();
-		try {
-			// check if we already have a reader cached.
-			List<ActiveIndexSearcher> currentlyCached = activeSearchers.get(absoluteFile);
-			if ( currentlyCached == null ) {
-				currentlyCached = new ArrayList<>();
-				activeSearchers.put(absoluteFile, currentlyCached);
-			} else {
-				// keep track of any searchers that have been closed so that we can remove them
-				// from our cache later.
-				final Set<ActiveIndexSearcher> expired = new HashSet<>();
-				
-				try {
-					for ( final ActiveIndexSearcher searcher : currentlyCached ) {
-						if ( searcher.isCache() ) {
-							final int refCount = searcher.getSearcher().getIndexReader().getRefCount();
-							if ( refCount <= 0 ) {
-								// if refCount == 0, then the reader has been closed, so we need to discard the searcher
-								logger.debug("Reference count for cached Index Searcher for {} is currently {}; "
-									+ "removing cached searcher", absoluteFile, refCount);
-								expired.add(searcher);
-								continue;
-							}
-							
-							logger.debug("Providing previously cached index searcher for {}", indexDir);
-							return searcher.getSearcher();
-						}
-					}
-				} finally {
-					// if we have any expired index searchers, we need to close them and remove them
-					// from the cache so that we don't try to use them again later.
-					for ( final ActiveIndexSearcher searcher : expired ) {
-						try {
-							searcher.close();
-						} catch (final Exception e) {
-							logger.debug("Failed to close 'expired' IndexSearcher {}", searcher);
-						}
-						
-						currentlyCached.remove(searcher);
-					}
-				}
-			}
-			
-			IndexWriterCount writerCount = writerCounts.remove(absoluteFile);
-			if ( writerCount == null ) {
-				final Directory directory = FSDirectory.open(absoluteFile);
-				logger.debug("No Index Writer currently exists for {}; creating a cachable reader", indexDir);
-				
-				try {
-					final DirectoryReader directoryReader = DirectoryReader.open(directory);
-					final IndexSearcher searcher = new IndexSearcher(directoryReader);
-					
-					// we want to cache the searcher that we create, since it's just a reader.
-					final ActiveIndexSearcher cached = new ActiveIndexSearcher(searcher, directoryReader, directory, true);
-					currentlyCached.add(cached);
-					
-					return cached.getSearcher();
-				} catch (final IOException e) {
-					try {
-						directory.close();
-					} catch (final IOException ioe) {
-						e.addSuppressed(ioe);
-					}
-					
-					throw e;
-				}
-			} else {
-				logger.debug("Index Writer currently exists for {}; creating a non-cachable reader and incrementing "
-						+ "counter to {}", indexDir, writerCount.getCount() + 1);
-
-				// increment the writer count to ensure that it's kept open.
-				writerCounts.put(absoluteFile, new IndexWriterCount(writerCount.getWriter(),
-						writerCount.getAnalyzer(), writerCount.getDirectory(), writerCount.getCount() + 1));
-				
-				// create a new Index Searcher from the writer so that we don't have an issue with trying
-				// to read from a directory that's locked. If we get the "no segments* file found" with
-				// Lucene, this indicates that an IndexWriter already has the directory open.
-				final IndexWriter writer = writerCount.getWriter();
-				final DirectoryReader directoryReader = DirectoryReader.open(writer, false);
-				final IndexSearcher searcher = new IndexSearcher(directoryReader);
-				
-				// we don't want to cache this searcher because it's based on a writer, so we want to get
-				// new values the next time that we search.
-				final ActiveIndexSearcher activeSearcher = new ActiveIndexSearcher(searcher, directoryReader, null, false);
-				
-				currentlyCached.add(activeSearcher);
-				return activeSearcher.getSearcher();
-			}
-		} finally {
-			lock.unlock();
-		}
-	}
-	
-	
-	public void returnIndexSearcher(final File indexDirectory, final IndexSearcher searcher) {
-		final File absoluteFile = indexDirectory.getAbsoluteFile();
-		logger.debug("Returning index searcher for {} to IndexManager", indexDirectory);
-		
-		lock.lock();
-		try {
-			// check if we already have a reader cached.
-			List<ActiveIndexSearcher> currentlyCached = activeSearchers.get(absoluteFile);
-			if ( currentlyCached == null ) {
-				logger.warn("Received Index Searcher for {} but no searcher was provided for that directory; this could "
-						+ "result in a resource leak", indexDirectory);
-				return;
-			}
-			
-			final Iterator<ActiveIndexSearcher> itr = currentlyCached.iterator();
-			while (itr.hasNext()) {
-				final ActiveIndexSearcher activeSearcher = itr.next();
-				if ( activeSearcher.getSearcher().equals(searcher) ) {
-					if ( activeSearcher.isCache() ) {
-						// the searcher is cached. Just leave it open.
-						logger.debug("Index searcher for {} is cached; leaving open", indexDirectory);
-						return;
-					} else {
-						// searcher is not cached. It was created from a writer, and we want
-						// the newest updates the next time that we get a searcher, so we will
-						// go ahead and close this one out.
-						itr.remove();
-						
-						// decrement the writer count because we incremented it when creating the searcher
-						final IndexWriterCount writerCount = writerCounts.remove(absoluteFile);
-						if ( writerCount != null ) {
-							if ( writerCount.getCount() <= 1 ) {
-								try {
-									logger.debug("Index searcher for {} is not cached. Writer count is "
-											+ "decremented to {}; closing writer", indexDirectory, writerCount.getCount() - 1);
-									
-									writerCount.close();
-								} catch (final IOException ioe) {
-									logger.warn("Failed to close Index Writer for {} due to {}", absoluteFile, ioe);
-									if ( logger.isDebugEnabled() ) {
-										logger.warn("", ioe);
-									}
-								}
-							} else {
-								logger.debug("Index searcher for {} is not cached. Writer count is decremented "
-										+ "to {}; leaving writer open", indexDirectory, writerCount.getCount() - 1);
-								
-								writerCounts.put(absoluteFile, new IndexWriterCount(writerCount.getWriter(),
-									writerCount.getAnalyzer(), writerCount.getDirectory(), 
-									writerCount.getCount() - 1));
-							}
-						}
-
-						try {
-							logger.debug("Closing Index Searcher for {}", indexDirectory);
-							activeSearcher.close();
-						} catch (final IOException ioe) {
-							logger.warn("Failed to close Index Searcher for {} due to {}", absoluteFile, ioe);
-							if ( logger.isDebugEnabled() ) {
-								logger.warn("", ioe);
-							}
-						}
-					}
-				}
-			}
-		} finally {
-			lock.unlock();
-		}
-	}
-	
-	@Override
-	public void close() throws IOException {
-		logger.debug("Closing Index Manager");
-		
-		lock.lock();
-		try {
-			IOException ioe = null;
-			
-			for ( final IndexWriterCount count : writerCounts.values() ) {
-				try {
-					count.close();
-				} catch (final IOException e) {
-					if ( ioe == null ) {
-						ioe = e;
-					} else {
-						ioe.addSuppressed(e);
-					}
-				}
-			}
-			
-			for (final List<ActiveIndexSearcher> searcherList : activeSearchers.values()) {
-				for (final ActiveIndexSearcher searcher : searcherList) {
-					try {
-						searcher.close();
-					} catch (final IOException e) {
-						if ( ioe == null ) {
-							ioe = e;
-						} else {
-							ioe.addSuppressed(e);
-						}
-					}
-				}
-			}
-			
-			if ( ioe != null ) {
-				throw ioe;
-			}
-		} finally {
-			lock.unlock();
-		}
-	}
-
-	
-	private static void close(final Closeable... closeables) throws IOException {
-		IOException ioe = null;
-		for ( final Closeable closeable : closeables ) {
-			if ( closeable == null ) {
-				continue;
-			}
-			
-			try {
-				closeable.close();
-			} catch (final IOException e) {
-				if ( ioe == null ) {
-					ioe = e;
-				} else {
-					ioe.addSuppressed(e);
-				}
-			}
-		}
-		
-		if ( ioe != null ) {
-			throw ioe;
-		}
-	}
-	
-	
-	private static class ActiveIndexSearcher implements Closeable {
-		private final IndexSearcher searcher;
-		private final DirectoryReader directoryReader;
-		private final Directory directory;
-		private final boolean cache;
-		
-		public ActiveIndexSearcher(IndexSearcher searcher, DirectoryReader directoryReader, 
-				Directory directory, final boolean cache) {
-			this.searcher = searcher;
-			this.directoryReader = directoryReader;
-			this.directory = directory;
-			this.cache = cache;
-		}
-
-		public boolean isCache() {
-			return cache;
-		}
-
-		public IndexSearcher getSearcher() {
-			return searcher;
-		}
-		
-		@Override
-		public void close() throws IOException {
-			IndexManager.close(directoryReader, directory);
-		}
-	}
-	
-	
-	private static class IndexWriterCount implements Closeable {
-		private final IndexWriter writer;
-		private final Analyzer analyzer;
-		private final Directory directory;
-		private final int count;
-		
-		public IndexWriterCount(final IndexWriter writer, final Analyzer analyzer, final Directory directory, final int count) {
-			this.writer = writer;
-			this.analyzer = analyzer;
-			this.directory = directory;
-			this.count = count;
-		}
-
-		public Analyzer getAnalyzer() {
-			return analyzer;
-		}
-
-		public Directory getDirectory() {
-			return directory;
-		}
-
-		public IndexWriter getWriter() {
-			return writer;
-		}
-
-		public int getCount() {
-			return count;
-		}
-
-		@Override
-		public void close() throws IOException {
-			IndexManager.close(writer, analyzer, directory);
-		}
-	}
+            } else {
+                logger.debug("Providing existing index writer for {} and incrementing count to {}", indexingDirectory, writerCount.getCount() + 1);
+                writerCounts.put(absoluteFile, new IndexWriterCount(writerCount.getWriter(),
+                        writerCount.getAnalyzer(), writerCount.getDirectory(), writerCount.getCount() + 1));
+            }
+
+            return writerCount.getWriter();
+        } finally {
+            lock.unlock();
+        }
+    }
+
+    public void returnIndexWriter(final File indexingDirectory, final IndexWriter writer) {
+        final File absoluteFile = indexingDirectory.getAbsoluteFile();
+        logger.debug("Returning Index Writer for {} to IndexManager", indexingDirectory);
+
+        lock.lock();
+        try {
+            IndexWriterCount count = writerCounts.remove(absoluteFile);
+
+            try {
+                if ( count == null ) {
+                    logger.warn("Index Writer {} was returned to IndexManager for {}, but this writer is not known. "
+                            + "This could potentially lead to a resource leak", writer, indexingDirectory);
+                    writer.close();
+                } else if ( count.getCount() <= 1 ) {
+                    // we are finished with this writer.
+                    logger.debug("Closing Index Writer for {}", indexingDirectory);
+                    count.close();
+                } else {
+                    // decrement the count.
+                    logger.debug("Decrementing count for Index Writer for {} to {}", indexingDirectory, count.getCount() - 1);
+                    writerCounts.put(absoluteFile, new IndexWriterCount(count.getWriter(), count.getAnalyzer(), count.getDirectory(), count.getCount() - 1));
+                }
+            } catch (final IOException ioe) {
+                logger.warn("Failed to close Index Writer {} due to {}", writer, ioe);
+                if ( logger.isDebugEnabled() ) {
+                    logger.warn("", ioe);
+                }
+            }
+        } finally {
+            lock.unlock();
+        }
+    }
+
+
+    public IndexSearcher borrowIndexSearcher(final File indexDir) throws IOException {
+        final File absoluteFile = indexDir.getAbsoluteFile();
+        logger.debug("Borrowing index searcher for {}", indexDir);
+
+        lock.lock();
+        try {
+            // check if we already have a reader cached.
+            List<ActiveIndexSearcher> currentlyCached = activeSearchers.get(absoluteFile);
+            if ( currentlyCached == null ) {
+                currentlyCached = new ArrayList<>();
+                activeSearchers.put(absoluteFile, currentlyCached);
+            } else {
+                // keep track of any searchers that have been closed so that we can remove them
+                // from our cache later.
+                final Set<ActiveIndexSearcher> expired = new HashSet<>();
+
+                try {
+                    for ( final ActiveIndexSearcher searcher : currentlyCached ) {
+                        if ( searcher.isCache() ) {
+                            final int refCount = searcher.getSearcher().getIndexReader().getRefCount();
+                            if ( refCount <= 0 ) {
+                                // if refCount == 0, then the reader has been closed, so we need to discard the searcher
+                                logger.debug("Reference count for cached Index Searcher for {} is currently {}; "
+                                        + "removing cached searcher", absoluteFile, refCount);
+                                expired.add(searcher);
+                                continue;
+                            }
+
+                            logger.debug("Providing previously cached index searcher for {}", indexDir);
+                            return searcher.getSearcher();
+                        }
+                    }
+                } finally {
+                    // if we have any expired index searchers, we need to close them and remove them
+                    // from the cache so that we don't try to use them again later.
+                    for ( final ActiveIndexSearcher searcher : expired ) {
+                        try {
+                            searcher.close();
+                        } catch (final Exception e) {
+                            logger.debug("Failed to close 'expired' IndexSearcher {}", searcher);
+                        }
+
+                        currentlyCached.remove(searcher);
+                    }
+                }
+            }
+
+            IndexWriterCount writerCount = writerCounts.remove(absoluteFile);
+            if ( writerCount == null ) {
+                final Directory directory = FSDirectory.open(absoluteFile);
+                logger.debug("No Index Writer currently exists for {}; creating a cachable reader", indexDir);
+
+                try {
+                    final DirectoryReader directoryReader = DirectoryReader.open(directory);
+                    final IndexSearcher searcher = new IndexSearcher(directoryReader);
+
+                    // we want to cache the searcher that we create, since it's just a reader.
+                    final ActiveIndexSearcher cached = new ActiveIndexSearcher(searcher, directoryReader, directory, true);
+                    currentlyCached.add(cached);
+
+                    return cached.getSearcher();
+                } catch (final IOException e) {
+                    try {
+                        directory.close();
+                    } catch (final IOException ioe) {
+                        e.addSuppressed(ioe);
+                    }
+
+                    throw e;
+                }
+            } else {
+                logger.debug("Index Writer currently exists for {}; creating a non-cachable reader and incrementing "
+                        + "counter to {}", indexDir, writerCount.getCount() + 1);
+
+                // increment the writer count to ensure that it's kept open.
+                writerCounts.put(absoluteFile, new IndexWriterCount(writerCount.getWriter(),
+                        writerCount.getAnalyzer(), writerCount.getDirectory(), writerCount.getCount() + 1));
+
+                // create a new Index Searcher from the writer so that we don't have an issue with trying
+                // to read from a directory that's locked. If we get the "no segments* file found" with
+                // Lucene, this indicates that an IndexWriter already has the directory open.
+                final IndexWriter writer = writerCount.getWriter();
+                final DirectoryReader directoryReader = DirectoryReader.open(writer, false);
+                final IndexSearcher searcher = new IndexSearcher(directoryReader);
+
+                // we don't want to cache this searcher because it's based on a writer, so we want to get
+                // new values the next time that we search.
+                final ActiveIndexSearcher activeSearcher = new ActiveIndexSearcher(searcher, directoryReader, null, false);
+
+                currentlyCached.add(activeSearcher);
+                return activeSearcher.getSearcher();
+            }
+        } finally {
+            lock.unlock();
+        }
+    }
+
+
+    public void returnIndexSearcher(final File indexDirectory, final IndexSearcher searcher) {
+        final File absoluteFile = indexDirectory.getAbsoluteFile();
+        logger.debug("Returning index searcher for {} to IndexManager", indexDirectory);
+
+        lock.lock();
+        try {
+            // check if we already have a reader cached.
+            List<ActiveIndexSearcher> currentlyCached = activeSearchers.get(absoluteFile);
+            if ( currentlyCached == null ) {
+                logger.warn("Received Index Searcher for {} but no searcher was provided for that directory; this could "
+                        + "result in a resource leak", indexDirectory);
+                return;
+            }
+
+            final Iterator<ActiveIndexSearcher> itr = currentlyCached.iterator();
+            while (itr.hasNext()) {
+                final ActiveIndexSearcher activeSearcher = itr.next();
+                if ( activeSearcher.getSearcher().equals(searcher) ) {
+                    if ( activeSearcher.isCache() ) {
+                        // the searcher is cached. Just leave it open.
+                        logger.debug("Index searcher for {} is cached; leaving open", indexDirectory);
+                        return;
+                    } else {
+                        // searcher is not cached. It was created from a writer, and we want
+                        // the newest updates the next time that we get a searcher, so we will
+                        // go ahead and close this one out.
+                        itr.remove();
+
+                        // decrement the writer count because we incremented it when creating the searcher
+                        final IndexWriterCount writerCount = writerCounts.remove(absoluteFile);
+                        if ( writerCount != null ) {
+                            if ( writerCount.getCount() <= 1 ) {
+                                try {
+                                    logger.debug("Index searcher for {} is not cached. Writer count is "
+                                            + "decremented to {}; closing writer", indexDirectory, writerCount.getCount() - 1);
+
+                                    writerCount.close();
+                                } catch (final IOException ioe) {
+                                    logger.warn("Failed to close Index Writer for {} due to {}", absoluteFile, ioe);
+                                    if ( logger.isDebugEnabled() ) {
+                                        logger.warn("", ioe);
+                                    }
+                                }
+                            } else {
+                                logger.debug("Index searcher for {} is not cached. Writer count is decremented "
+                                        + "to {}; leaving writer open", indexDirectory, writerCount.getCount() - 1);
+
+                                writerCounts.put(absoluteFile, new IndexWriterCount(writerCount.getWriter(),
+                                        writerCount.getAnalyzer(), writerCount.getDirectory(),
+                                        writerCount.getCount() - 1));
+                            }
+                        }
+
+                        try {
+                            logger.debug("Closing Index Searcher for {}", indexDirectory);
+                            activeSearcher.close();
+                        } catch (final IOException ioe) {
+                            logger.warn("Failed to close Index Searcher for {} due to {}", absoluteFile, ioe);
+                            if ( logger.isDebugEnabled() ) {
+                                logger.warn("", ioe);
+                            }
+                        }
+                    }
+                }
+            }
+        } finally {
+            lock.unlock();
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        logger.debug("Closing Index Manager");
+
+        lock.lock();
+        try {
+            IOException ioe = null;
+
+            for ( final IndexWriterCount count : writerCounts.values() ) {
+                try {
+                    count.close();
+                } catch (final IOException e) {
+                    if ( ioe == null ) {
+                        ioe = e;
+                    } else {
+                        ioe.addSuppressed(e);
+                    }
+                }
+            }
+
+            for (final List<ActiveIndexSearcher> searcherList : activeSearchers.values()) {
+                for (final ActiveIndexSearcher searcher : searcherList) {
+                    try {
+                        searcher.close();
+                    } catch (final IOException e) {
+                        if ( ioe == null ) {
+                            ioe = e;
+                        } else {
+                            ioe.addSuppressed(e);
+                        }
+                    }
+                }
+            }
+
+            if ( ioe != null ) {
+                throw ioe;
+            }
+        } finally {
+            lock.unlock();
+        }
+    }
+
+
+    private static void close(final Closeable... closeables) throws IOException {
+        IOException ioe = null;
+        for ( final Closeable closeable : closeables ) {
+            if ( closeable == null ) {
+                continue;
+            }
+
+            try {
+                closeable.close();
+            } catch (final IOException e) {
+                if ( ioe == null ) {
+                    ioe = e;
+                } else {
+                    ioe.addSuppressed(e);
+                }
+            }
+        }
+
+        if ( ioe != null ) {
+            throw ioe;
+        }
+    }
+
+
+    private static class ActiveIndexSearcher implements Closeable {
+        private final IndexSearcher searcher;
+        private final DirectoryReader directoryReader;
+        private final Directory directory;
+        private final boolean cache;
+
+        public ActiveIndexSearcher(IndexSearcher searcher, DirectoryReader directoryReader,
+                Directory directory, final boolean cache) {
+            this.searcher = searcher;
+            this.directoryReader = directoryReader;
+            this.directory = directory;
+            this.cache = cache;
+        }
+
+        public boolean isCache() {
+            return cache;
+        }
+
+        public IndexSearcher getSearcher() {
+            return searcher;
+        }
+
+        @Override
+        public void close() throws IOException {
+            IndexManager.close(directoryReader, directory);
+        }
+    }
+
+
+    private static class IndexWriterCount implements Closeable {
+        private final IndexWriter writer;
+        private final Analyzer analyzer;
+        private final Directory directory;
+        private final int count;
+
+        public IndexWriterCount(final IndexWriter writer, final Analyzer analyzer, final Directory directory, final int count) {
+            this.writer = writer;
+            this.analyzer = analyzer;
+            this.directory = directory;
+            this.count = count;
+        }
+
+        public Analyzer getAnalyzer() {
+            return analyzer;
+        }
+
+        public Directory getDirectory() {
+            return directory;
+        }
+
+        public IndexWriter getWriter() {
+            return writer;
+        }
+
+        public int getCount() {
+            return count;
+        }
+
+        @Override
+        public void close() throws IOException {
+            IndexManager.close(writer, analyzer, directory);
+        }
+    }
 
 }
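
The reworked IndexManager above keeps one shared IndexWriter per index directory and counts active borrowers, so the writer is closed only when the last borrower returns it, while read-only searchers are cached and reused until their underlying reader is closed. A minimal sketch of that reference-counting idea, using hypothetical names (RefCountedPool, Entry, the Callable factory) that are not part of the NiFi code:

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.locks.ReentrantLock;

public class RefCountedPool<T extends Closeable> {

    private final Map<File, Entry<T>> entries = new HashMap<>();
    private final ReentrantLock lock = new ReentrantLock();

    private static class Entry<R> {
        private final R resource;
        private int count;

        public Entry(final R resource) {
            this.resource = resource;
        }
    }

    public T borrow(final File key, final Callable<T> factory) throws Exception {
        lock.lock();
        try {
            Entry<T> entry = entries.get(key);
            if (entry == null) {
                entry = new Entry<>(factory.call());    // first borrower creates the resource
                entries.put(key, entry);
            }
            entry.count++;                              // one more active borrower
            return entry.resource;
        } finally {
            lock.unlock();
        }
    }

    public void release(final File key) throws IOException {
        lock.lock();
        try {
            final Entry<T> entry = entries.get(key);
            if (entry != null && --entry.count <= 0) {  // last borrower closes and evicts
                entries.remove(key);
                entry.resource.close();
            }
        } finally {
            lock.unlock();
        }
    }
}

Keeping the count alongside the resource lets the manager hand the same writer to concurrent indexing and search callers without ever closing it underneath one of them.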

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexSearch.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexSearch.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexSearch.java
index dcb6e08..53869f4 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexSearch.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexSearch.java
@@ -35,7 +35,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class IndexSearch {
-	private final Logger logger = LoggerFactory.getLogger(IndexSearch.class);
+    private final Logger logger = LoggerFactory.getLogger(IndexSearch.class);
     private final PersistentProvenanceRepository repository;
     private final File indexDirectory;
     private final IndexManager indexManager;
@@ -65,17 +65,17 @@ public class IndexSearch {
         final long start = System.nanoTime();
         IndexSearcher searcher = null;
         try {
-        	searcher = indexManager.borrowIndexSearcher(indexDirectory);
+            searcher = indexManager.borrowIndexSearcher(indexDirectory);
             final long searchStartNanos = System.nanoTime();
             final long openSearcherNanos = searchStartNanos - start;
-            
+
             final TopDocs topDocs = searcher.search(luceneQuery, provenanceQuery.getMaxResults());
             final long finishSearch = System.nanoTime();
             final long searchNanos = finishSearch - searchStartNanos;
-            
-            logger.debug("Searching {} took {} millis; opening searcher took {} millis", this, 
-            		TimeUnit.NANOSECONDS.toMillis(searchNanos), TimeUnit.NANOSECONDS.toMillis(openSearcherNanos));
-            
+
+            logger.debug("Searching {} took {} millis; opening searcher took {} millis", this,
+                    TimeUnit.NANOSECONDS.toMillis(searchNanos), TimeUnit.NANOSECONDS.toMillis(openSearcherNanos));
+
             if (topDocs.totalHits == 0) {
                 sqr.update(Collections.<ProvenanceEventRecord>emptyList(), 0);
                 return sqr;
@@ -83,31 +83,31 @@ public class IndexSearch {
 
             final DocsReader docsReader = new DocsReader(repository.getConfiguration().getStorageDirectories());
             matchingRecords = docsReader.read(topDocs, searcher.getIndexReader(), repository.getAllLogFiles(), retrievedCount, provenanceQuery.getMaxResults());
-            
+
             final long readRecordsNanos = System.nanoTime() - finishSearch;
             logger.debug("Reading {} records took {} millis for {}", matchingRecords.size(), TimeUnit.NANOSECONDS.toMillis(readRecordsNanos), this);
-            
+
             sqr.update(matchingRecords, topDocs.totalHits);
             return sqr;
         } catch (final FileNotFoundException e) {
             // nothing has been indexed yet, or the data has already aged off
-        	logger.warn("Attempted to search Provenance Index {} but could not find the file due to {}", indexDirectory, e);
-        	if ( logger.isDebugEnabled() ) {
-        		logger.warn("", e);
-        	}
-        	
+            logger.warn("Attempted to search Provenance Index {} but could not find the file due to {}", indexDirectory, e);
+            if ( logger.isDebugEnabled() ) {
+                logger.warn("", e);
+            }
+
             sqr.update(Collections.<ProvenanceEventRecord>emptyList(), 0);
             return sqr;
         } finally {
-        	if ( searcher != null ) {
-        		indexManager.returnIndexSearcher(indexDirectory, searcher);
-        	}
+            if ( searcher != null ) {
+                indexManager.returnIndexSearcher(indexDirectory, searcher);
+            }
         }
     }
 
-    
+
     @Override
     public String toString() {
-    	return "IndexSearcher[" + indexDirectory + "]";
+        return "IndexSearcher[" + indexDirectory + "]";
     }
 }
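
IndexSearch borrows a searcher inside the try block and returns it in the finally block, so the IndexManager gets the searcher back even when the query throws. A condensed sketch of that call pattern; result handling is elided, the class name is a placeholder, and the import of IndexManager assumes it sits in the same lucene package as IndexSearch:

import java.io.File;
import java.io.IOException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.nifi.provenance.lucene.IndexManager;

public class SearchSketch {

    static TopDocs runQuery(final IndexManager manager, final File indexDir,
            final Query luceneQuery, final int maxResults) throws IOException {
        IndexSearcher searcher = null;
        try {
            searcher = manager.borrowIndexSearcher(indexDir);      // may reuse a cached reader
            return searcher.search(luceneQuery, maxResults);       // run the Lucene query
        } finally {
            if (searcher != null) {
                manager.returnIndexSearcher(indexDir, searcher);   // always hand it back
            }
        }
    }
}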

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexingAction.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexingAction.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexingAction.java
index 5e87913..46be391 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexingAction.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/IndexingAction.java
@@ -16,50 +16,30 @@
  */
 package org.apache.nifi.provenance.lucene;
 
-import java.io.EOFException;
-import java.io.File;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.IntField;
 import org.apache.lucene.document.LongField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
-import org.apache.nifi.provenance.IndexConfiguration;
 import org.apache.nifi.provenance.PersistentProvenanceRepository;
 import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.provenance.SearchableFields;
 import org.apache.nifi.provenance.StandardProvenanceEventRecord;
-import org.apache.nifi.provenance.rollover.RolloverAction;
 import org.apache.nifi.provenance.search.SearchableField;
-import org.apache.nifi.provenance.serialization.RecordReader;
-import org.apache.nifi.provenance.serialization.RecordReaders;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-public class IndexingAction implements RolloverAction {
-
-    private final PersistentProvenanceRepository repository;
+public class IndexingAction {
     private final Set<SearchableField> nonAttributeSearchableFields;
     private final Set<SearchableField> attributeSearchableFields;
-    private final IndexConfiguration indexConfiguration;
-    private static final Logger logger = LoggerFactory.getLogger(IndexingAction.class);
-
-    public IndexingAction(final PersistentProvenanceRepository repo, final IndexConfiguration indexConfig) {
-        repository = repo;
-        indexConfiguration = indexConfig;
 
+    public IndexingAction(final PersistentProvenanceRepository repo) {
         attributeSearchableFields = Collections.unmodifiableSet(new HashSet<>(repo.getConfiguration().getSearchableAttributes()));
         nonAttributeSearchableFields = Collections.unmodifiableSet(new HashSet<>(repo.getConfiguration().getSearchableFields()));
     }
@@ -72,7 +52,7 @@ public class IndexingAction implements RolloverAction {
         doc.add(new StringField(field.getSearchableFieldName(), value.toLowerCase(), store));
     }
 
-    
+
     public void index(final StandardProvenanceEventRecord record, final IndexWriter indexWriter, final Integer blockIndex) throws IOException {
         final Map<String, String> attributes = record.getAttributes();
 
@@ -105,14 +85,14 @@ public class IndexingAction implements RolloverAction {
             doc.add(new LongField(SearchableFields.EventTime.getSearchableFieldName(), record.getEventTime(), Store.NO));
             doc.add(new LongField(SearchableFields.FileSize.getSearchableFieldName(), record.getFileSize(), Store.NO));
             doc.add(new StringField(FieldNames.STORAGE_FILENAME, storageFilename, Store.YES));
-            
+
             if ( blockIndex == null ) {
-            	doc.add(new LongField(FieldNames.STORAGE_FILE_OFFSET, record.getStorageByteOffset(), Store.YES));
+                doc.add(new LongField(FieldNames.STORAGE_FILE_OFFSET, record.getStorageByteOffset(), Store.YES));
             } else {
-	            doc.add(new IntField(FieldNames.BLOCK_INDEX, blockIndex, Store.YES));
-	            doc.add(new LongField(SearchableFields.Identifier.getSearchableFieldName(), record.getEventId(), Store.YES));
+                doc.add(new IntField(FieldNames.BLOCK_INDEX, blockIndex, Store.YES));
+                doc.add(new LongField(SearchableFields.Identifier.getSearchableFieldName(), record.getEventId(), Store.YES));
             }
-            
+
             for (final String lineageIdentifier : record.getLineageIdentifiers()) {
                 addField(doc, SearchableFields.LineageIdentifier, lineageIdentifier, Store.NO);
             }
@@ -150,87 +130,4 @@ public class IndexingAction implements RolloverAction {
             indexWriter.addDocument(doc);
         }
     }
-    
-    @Override
-    public File execute(final File fileRolledOver) throws IOException {
-        final File indexingDirectory = indexConfiguration.getWritableIndexDirectory(fileRolledOver);
-        int indexCount = 0;
-        long maxId = -1L;
-
-        try (final Directory directory = FSDirectory.open(indexingDirectory);
-                final Analyzer analyzer = new StandardAnalyzer()) {
-
-            final IndexWriterConfig config = new IndexWriterConfig(LuceneUtil.LUCENE_VERSION, analyzer);
-            config.setWriteLockTimeout(300000L);
-
-            try (final IndexWriter indexWriter = new IndexWriter(directory, config);
-                    final RecordReader reader = RecordReaders.newRecordReader(fileRolledOver, repository.getAllLogFiles())) {
-                StandardProvenanceEventRecord record;
-                while (true) {
-                	final Integer blockIndex;
-                	if ( reader.isBlockIndexAvailable() ) {
-                		blockIndex = reader.getBlockIndex();
-                	} else {
-                		blockIndex = null;
-                	}
-                	
-                    try {
-                        record = reader.nextRecord();
-                    } catch (final EOFException eof) {
-                        // system was restarted while writing to the log file. Nothing we can do here, so ignore this record.
-                        // On system restart, the FlowFiles should be back in their "original" queues, so the events will be re-created
-                        // when the data is re-processed
-                        break;
-                    }
-
-                    if (record == null) {
-                        break;
-                    }
-
-                    maxId = record.getEventId();
-
-                    index(record, indexWriter, blockIndex);
-                    indexCount++;
-                }
-
-                indexWriter.commit();
-            } catch (final EOFException eof) {
-                // nothing in the file. Move on.
-            }
-        } finally {
-            if (maxId >= -1) {
-                indexConfiguration.setMaxIdIndexed(maxId);
-            }
-        }
-
-        final File newFile = new File(fileRolledOver.getParent(),
-                LuceneUtil.substringBeforeLast(fileRolledOver.getName(), ".")
-                + ".indexed."
-                + LuceneUtil.substringAfterLast(fileRolledOver.getName(), "."));
-
-        boolean renamed = false;
-        for (int i = 0; i < 10 && !renamed; i++) {
-            renamed = fileRolledOver.renameTo(newFile);
-            if (!renamed) {
-                try {
-                    Thread.sleep(25L);
-                } catch (final InterruptedException e) {
-                }
-            }
-        }
-
-        if (renamed) {
-            logger.info("Finished indexing Provenance Log File {} to index {} with {} records indexed and renamed file to {}",
-                    fileRolledOver, indexingDirectory, indexCount, newFile);
-            return newFile;
-        } else {
-            logger.warn("Finished indexing Provenance Log File {} with {} records indexed but failed to rename file to {}; indexed {} records", new Object[]{fileRolledOver, indexCount, newFile, indexCount});
-            return fileRolledOver;
-        }
-    }
-
-    @Override
-    public boolean hasBeenPerformed(final File fileRolledOver) {
-        return fileRolledOver.getName().contains(".indexed.");
-    }
 }
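
IndexingAction turns each provenance event into a Lucene Document, storing only what is needed to locate the event on disk again: the storage filename plus either the raw byte offset or, when a table of contents exists, the block index and event id. A stripped-down sketch against the Lucene 4.x field API used above; the literal field names are illustrative stand-ins for the FieldNames constants:

import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;

public class IndexEventSketch {

    static void indexEvent(final IndexWriter indexWriter, final String storageFilename,
            final Integer blockIndex, final long eventId, final long byteOffset) throws IOException {
        final Document doc = new Document();
        doc.add(new StringField("storage-filename", storageFilename, Store.YES)); // which log file holds the event
        if (blockIndex == null) {
            doc.add(new LongField("storage-fileOffset", byteOffset, Store.YES));  // seek straight to the event
        } else {
            doc.add(new IntField("block-index", blockIndex, Store.YES));          // seek to the compression block...
            doc.add(new LongField("identifier", eventId, Store.YES));             // ...then scan for this event id
        }
        indexWriter.addDocument(doc);
    }
}

Searchable fields such as event time, file size, and the lineage identifiers are added the same way but with Store.NO, since they only need to be queryable, not retrievable.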

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
index 54cde15..3f75c00 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
@@ -48,7 +48,8 @@ public class LineageQuery {
     public static final int MAX_LINEAGE_UUIDS = 100;
     private static final Logger logger = LoggerFactory.getLogger(LineageQuery.class);
 
-    public static Set<ProvenanceEventRecord> computeLineageForFlowFiles(final PersistentProvenanceRepository repo, final File indexDirectory, final String lineageIdentifier, final Collection<String> flowFileUuids) throws IOException {
+    public static Set<ProvenanceEventRecord> computeLineageForFlowFiles(final PersistentProvenanceRepository repo, final File indexDirectory,
+            final String lineageIdentifier, final Collection<String> flowFileUuids) throws IOException {
         if (requireNonNull(flowFileUuids).size() > MAX_LINEAGE_UUIDS) {
             throw new IllegalArgumentException(String.format("Cannot compute lineage for more than %s FlowFiles. This lineage contains %s.", MAX_LINEAGE_UUIDS, flowFileUuids.size()));
         }
@@ -99,7 +100,8 @@ public class LineageQuery {
             final DocsReader docsReader = new DocsReader(repo.getConfiguration().getStorageDirectories());
             final Set<ProvenanceEventRecord> recs = docsReader.read(uuidQueryTopDocs, indexReader, repo.getAllLogFiles(), new AtomicInteger(0), Integer.MAX_VALUE);
             final long readDocsEnd = System.nanoTime();
-            logger.debug("Finished Lineage Query; Lucene search took {} millis, reading records took {} millis", TimeUnit.NANOSECONDS.toMillis(searchEnd - searchStart), TimeUnit.NANOSECONDS.toMillis(readDocsEnd - searchEnd));
+            logger.debug("Finished Lineage Query; Lucene search took {} millis, reading records took {} millis",
+                    TimeUnit.NANOSECONDS.toMillis(searchEnd - searchStart), TimeUnit.NANOSECONDS.toMillis(readDocsEnd - searchEnd));
 
             return recs;
         }
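
computeLineageForFlowFiles matches any of up to MAX_LINEAGE_UUIDS FlowFile UUIDs in a single Lucene search. The query construction is outside this hunk, but a plausible shape with the Lucene 4.x API is one TermQuery per UUID OR'd together; the field name below is a placeholder for the real searchable-field name:

import java.util.Collection;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

public class LineageQuerySketch {

    static BooleanQuery uuidQuery(final Collection<String> flowFileUuids) {
        final BooleanQuery query = new BooleanQuery();
        for (final String uuid : flowFileUuids) {
            query.add(new TermQuery(new Term("uuid", uuid)), Occur.SHOULD);  // match any one of the UUIDs
        }
        return query;
    }
}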

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LuceneUtil.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LuceneUtil.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LuceneUtil.java
index 59dc10b..c622ea1 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LuceneUtil.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LuceneUtil.java
@@ -78,16 +78,16 @@ public class LuceneUtil {
         final String searchString = baseName + ".";
         for (final Path path : allProvenanceLogs) {
             if (path.toFile().getName().startsWith(searchString)) {
-            	final File file = path.toFile();
-            	if ( file.exists() ) {
-            		matchingFiles.add(file);
-            	} else {
-            		final File dir = file.getParentFile();
-            		final File gzFile = new File(dir, file.getName() + ".gz");
-            		if ( gzFile.exists() ) {
-            			matchingFiles.add(gzFile);
-            		}
-            	}
+                final File file = path.toFile();
+                if ( file.exists() ) {
+                    matchingFiles.add(file);
+                } else {
+                    final File dir = file.getParentFile();
+                    final File gzFile = new File(dir, file.getName() + ".gz");
+                    if ( gzFile.exists() ) {
+                        matchingFiles.add(gzFile);
+                    }
+                }
             }
         }
 
@@ -144,16 +144,16 @@ public class LuceneUtil {
                 final IndexableField fileOffset1 = o1.getField(FieldNames.BLOCK_INDEX);
                 final IndexableField fileOffset2 = o1.getField(FieldNames.BLOCK_INDEX);
                 if ( fileOffset1 != null && fileOffset2 != null ) {
-                	final int blockIndexResult = Long.compare(fileOffset1.numericValue().longValue(), fileOffset2.numericValue().longValue());
-                	if ( blockIndexResult != 0 ) {
-                		return blockIndexResult;
-                	}
-                	
-                	final long eventId1 = o1.getField(SearchableFields.Identifier.getSearchableFieldName()).numericValue().longValue();
-                	final long eventId2 = o2.getField(SearchableFields.Identifier.getSearchableFieldName()).numericValue().longValue();
-                	return Long.compare(eventId1, eventId2);
+                    final int blockIndexResult = Long.compare(fileOffset1.numericValue().longValue(), fileOffset2.numericValue().longValue());
+                    if ( blockIndexResult != 0 ) {
+                        return blockIndexResult;
+                    }
+
+                    final long eventId1 = o1.getField(SearchableFields.Identifier.getSearchableFieldName()).numericValue().longValue();
+                    final long eventId2 = o2.getField(SearchableFields.Identifier.getSearchableFieldName()).numericValue().longValue();
+                    return Long.compare(eventId1, eventId2);
                 }
-                
+
                 final long offset1 = o1.getField(FieldNames.STORAGE_FILE_OFFSET).numericValue().longValue();
                 final long offset2 = o2.getField(FieldNames.STORAGE_FILE_OFFSET).numericValue().longValue();
                 return Long.compare(offset1, offset2);
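
The comparator in this hunk orders matching documents by compression block index, breaks ties with the event id, and falls back to the raw storage offset when no block index was indexed. (Both block-index lookups in the excerpt read from o1, which looks like a pre-existing copy/paste slip rather than something this formatting change touches.) A distilled sketch of the intended ordering, with placeholder field names:

import java.util.Comparator;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;

public class DocumentOrderSketch {

    static final Comparator<Document> BY_POSITION = new Comparator<Document>() {
        @Override
        public int compare(final Document o1, final Document o2) {
            final IndexableField block1 = o1.getField("block-index");
            final IndexableField block2 = o2.getField("block-index");
            if (block1 != null && block2 != null) {
                final int byBlock = Long.compare(block1.numericValue().longValue(),
                        block2.numericValue().longValue());
                if (byBlock != 0) {
                    return byBlock;                                      // earlier compression block first
                }
                final long eventId1 = o1.getField("identifier").numericValue().longValue();
                final long eventId2 = o2.getField("identifier").numericValue().longValue();
                return Long.compare(eventId1, eventId2);                 // then by event id within the block
            }
            // no block index recorded: order by absolute byte offset within the log file
            final long offset1 = o1.getField("storage-fileOffset").numericValue().longValue();
            final long offset2 = o2.getField("storage-fileOffset").numericValue().longValue();
            return Long.compare(offset1, offset2);
        }
    };
}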

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/rollover/CompressionAction.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/rollover/CompressionAction.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/rollover/CompressionAction.java
deleted file mode 100644
index d014618..0000000
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/rollover/CompressionAction.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.provenance.rollover;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-
-import org.apache.nifi.stream.io.GZIPOutputStream;
-import org.apache.nifi.stream.io.StreamUtils;
-import org.apache.nifi.provenance.lucene.IndexingAction;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class CompressionAction implements RolloverAction {
-
-    private static final Logger logger = LoggerFactory.getLogger(IndexingAction.class);
-
-    @Override
-    public File execute(final File fileRolledOver) throws IOException {
-        final File gzFile = new File(fileRolledOver.getParent(), fileRolledOver.getName() + ".gz");
-        try (final FileInputStream in = new FileInputStream(fileRolledOver);
-                final OutputStream fos = new FileOutputStream(gzFile);
-                final GZIPOutputStream gzipOut = new GZIPOutputStream(fos, 1)) {
-            StreamUtils.copy(in, gzipOut);
-            in.getFD().sync();
-        }
-
-        boolean deleted = false;
-        for (int i = 0; i < 10 && !deleted; i++) {
-            deleted = fileRolledOver.delete();
-        }
-
-        logger.info("Finished compressing Provenance Log File {}", fileRolledOver);
-        return gzFile;
-    }
-
-    @Override
-    public boolean hasBeenPerformed(final File fileRolledOver) {
-        return fileRolledOver.getName().contains(".gz");
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/rollover/RolloverAction.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/rollover/RolloverAction.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/rollover/RolloverAction.java
deleted file mode 100644
index 33401e9..0000000
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/rollover/RolloverAction.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.provenance.rollover;
-
-import java.io.File;
-import java.io.IOException;
-
-public interface RolloverAction {
-
-    /**
-     * Performs some action against the given File and returns the new File that
-     * contains the modified version
-     *
-     * @param fileRolledOver
-     * @return
-     * @throws IOException
-     */
-    File execute(File fileRolledOver) throws IOException;
-
-    boolean hasBeenPerformed(File fileRolledOver);
-}

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReader.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReader.java
index 8bdc88a..91c8222 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReader.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReader.java
@@ -24,75 +24,80 @@ import org.apache.nifi.provenance.toc.TocReader;
 
 public interface RecordReader extends Closeable {
 
-	/**
-	 * Returns the next record in the reader, or <code>null</code> if there is no more data available.
-	 * @return
-	 * @throws IOException
-	 */
+    /**
+     * Returns the next record in the reader, or <code>null</code> if there is no more data available.
+     * @return the next Provenance event in the stream
+     * @throws IOException if unable to read the next event from the stream
+     */
     StandardProvenanceEventRecord nextRecord() throws IOException;
 
     /**
      * Skips the specified number of bytes
-     * @param bytesToSkip
-     * @throws IOException
+     * @param bytesToSkip the number of bytes to skip ahead
+     * @throws IOException if unable to skip ahead the specified number of bytes (e.g., the stream does
+     *  not contain this many more bytes)
      */
     void skip(long bytesToSkip) throws IOException;
 
     /**
      * Skips to the specified byte offset in the underlying stream.
-     * @param position
+     * @param position the byte offset to skip to
      * @throws IOException if the underlying stream throws IOException, or if the reader has already
      * passed the specified byte offset
      */
     void skipTo(long position) throws IOException;
-    
+
     /**
      * Skips to the specified compression block
-     * 
-     * @param blockIndex
+     *
+     * @param blockIndex the byte index to skip to
      * @throws IOException if the underlying stream throws IOException, or if the reader has already
      * read passed the specified compression block index
      * @throws IllegalStateException if the RecordReader does not have a TableOfContents associated with it
      */
     void skipToBlock(int blockIndex) throws IOException;
-    
+
     /**
      * Returns the block index that the Reader is currently reading from.
      * Note that the block index is incremented at the beginning of the {@link #nextRecord()}
-     * method. This means that this method will return the block from which the previous record was read, 
+     * method. This means that this method will return the block from which the previous record was read,
      * if calling {@link #nextRecord()} continually, not the block from which the next record will be read.
-     * @return
+     *
+     * @return the current block index
+     * @throws IllegalStateException if the reader is reading a provenance event file that does not contain
+     * a Table of Contents
      */
     int getBlockIndex();
-    
+
     /**
      * Returns <code>true</code> if the compression block index is available. It will be available
      * if and only if the reader is created with a TableOfContents
-     * 
-     * @return
+     *
+     * @return true if the reader is reading from an event file that has a Table of Contents
      */
     boolean isBlockIndexAvailable();
-    
+
     /**
      * Returns the {@link TocReader} that is used to keep track of compression blocks, if one exists,
      * <code>null</code> otherwise
-     * @return
+     *
+     * @return the TocReader if the underlying event file has an Table of Contents, <code>null</code> otherwise.
      */
     TocReader getTocReader();
-    
+
     /**
-     * Returns the number of bytes that have been consumed from the stream (read or skipped).
-     * @return
+     * @return the number of bytes that have been consumed from the stream (read or skipped).
      */
     long getBytesConsumed();
-    
+
     /**
      * Returns the ID of the last event in this record reader, or -1 if the reader has no records or
      * has already read through all records. Note: This method will consume the stream until the end,
      * so no more records will be available on this reader after calling this method.
-     * 
-     * @return
-     * @throws IOException
+     *
+     * @return the ID of the last event in this record reader, or -1 if the reader has no records or
+     * has already read through all records
+     * @throws IOException if unable to get the ID of the last event
      */
     long getMaxEventId() throws IOException;
 }

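For orientation, the reader contract above is typically driven by a simple loop over nextRecord(). The sketch below is not part of this commit; it assumes the factory signature RecordReaders.newRecordReader(File, Collection<Path>) whose body appears in the next diff, and it assumes the reader can be closed when finished.

import java.io.File;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;

import org.apache.nifi.provenance.StandardProvenanceEventRecord;
import org.apache.nifi.provenance.serialization.RecordReader;
import org.apache.nifi.provenance.serialization.RecordReaders;

public class ReadProvenanceJournal {

    public static void main(final String[] args) throws Exception {
        final File journal = new File(args[0]);  // e.g. a rolled-over .prov or .prov.gz file
        final Collection<Path> provenanceLogFiles = Collections.emptyList();

        // Assumed factory signature; the method body is shown in the RecordReaders diff below.
        final RecordReader reader = RecordReaders.newRecordReader(journal, provenanceLogFiles);
        try {
            // Block skipping is only possible when the journal was written with a Table of Contents.
            if (reader.isBlockIndexAvailable()) {
                reader.skipToBlock(0);
            }

            StandardProvenanceEventRecord event;
            while ((event = reader.nextRecord()) != null) {
                System.out.println("event " + event.getEventId()
                        + ", bytes consumed so far: " + reader.getBytesConsumed());
            }
        } finally {
            reader.close();  // assumes the reader is Closeable
        }
    }
}
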
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReaders.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReaders.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReaders.java
index dff281c..cab5e6f 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReaders.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordReaders.java
@@ -37,75 +37,75 @@ public class RecordReaders {
         InputStream fis = null;
 
         try {
-	        if (!file.exists()) {
-	            if (provenanceLogFiles != null) {
-		            final String baseName = LuceneUtil.substringBefore(file.getName(), ".") + ".";
-		            for (final Path path : provenanceLogFiles) {
-		                if (path.toFile().getName().startsWith(baseName)) {
-		                    file = path.toFile();
-		                    break;
-		                }
-		            }
-	            }
-	        }
-	
-	        if ( file.exists() ) {
-	            try {
-	                fis = new FileInputStream(file);
-	            } catch (final FileNotFoundException fnfe) {
-	                fis = null;
-	            }
-	        }
-	        
-	        String filename = file.getName();
-	        openStream: while ( fis == null ) {
-	            final File dir = file.getParentFile();
-	            final String baseName = LuceneUtil.substringBefore(file.getName(), ".");
-	            
-	            // depending on which rollover actions have occurred, we could have 3 possibilities for the
-	            // filename that we need. The majority of the time, we will use the extension ".prov.indexed.gz"
-	            // because most often we are compressing on rollover and most often we have already finished
-	            // compressing by the time that we are querying the data.
-	            for ( final String extension : new String[] {".prov.gz", ".prov"} ) {
-	                file = new File(dir, baseName + extension);
-	                if ( file.exists() ) {
-	                    try {
-	                        fis = new FileInputStream(file);
-	                        filename = baseName + extension;
-	                        break openStream;
-	                    } catch (final FileNotFoundException fnfe) {
-	                        // file was modified by a RolloverAction after we verified that it exists but before we could
-	                        // create an InputStream for it. Start over.
-	                        fis = null;
-	                        continue openStream;
-	                    }
-	                }
-	            }
-	            
-	            break;
-	        }
-	
-	        if ( fis == null ) {
-	            throw new FileNotFoundException("Unable to locate file " + originalFile);
-	        }
-	
-	    	final File tocFile = TocUtil.getTocFile(file);
-	    	if ( tocFile.exists() ) {
-	    		final TocReader tocReader = new StandardTocReader(tocFile);
-	    		return new StandardRecordReader(fis, filename, tocReader);
-	    	} else {
-	    		return new StandardRecordReader(fis, filename);
-	    	}
+            if (!file.exists()) {
+                if (provenanceLogFiles != null) {
+                    final String baseName = LuceneUtil.substringBefore(file.getName(), ".") + ".";
+                    for (final Path path : provenanceLogFiles) {
+                        if (path.toFile().getName().startsWith(baseName)) {
+                            file = path.toFile();
+                            break;
+                        }
+                    }
+                }
+            }
+
+            if ( file.exists() ) {
+                try {
+                    fis = new FileInputStream(file);
+                } catch (final FileNotFoundException fnfe) {
+                    fis = null;
+                }
+            }
+
+            String filename = file.getName();
+            openStream: while ( fis == null ) {
+                final File dir = file.getParentFile();
+                final String baseName = LuceneUtil.substringBefore(file.getName(), ".");
+
+                // depending on which rollover actions have occurred, we could have 3 possibilities for the
+                // filename that we need. The majority of the time, we will use the extension ".prov.gz"
+                // because most often we are compressing on rollover and most often we have already finished
+                // compressing by the time that we are querying the data.
+                for ( final String extension : new String[] {".prov.gz", ".prov"} ) {
+                    file = new File(dir, baseName + extension);
+                    if ( file.exists() ) {
+                        try {
+                            fis = new FileInputStream(file);
+                            filename = baseName + extension;
+                            break openStream;
+                        } catch (final FileNotFoundException fnfe) {
+                            // file was modified by a RolloverAction after we verified that it exists but before we could
+                            // create an InputStream for it. Start over.
+                            fis = null;
+                            continue openStream;
+                        }
+                    }
+                }
+
+                break;
+            }
+
+            if ( fis == null ) {
+                throw new FileNotFoundException("Unable to locate file " + originalFile);
+            }
+
+            final File tocFile = TocUtil.getTocFile(file);
+            if ( tocFile.exists() ) {
+                final TocReader tocReader = new StandardTocReader(tocFile);
+                return new StandardRecordReader(fis, filename, tocReader);
+            } else {
+                return new StandardRecordReader(fis, filename);
+            }
         } catch (final IOException ioe) {
-        	if ( fis != null ) {
-        		try {
-        			fis.close();
-        		} catch (final IOException inner) {
-        			ioe.addSuppressed(inner);
-        		}
-        	}
-        	
-        	throw ioe;
+            if ( fis != null ) {
+                try {
+                    fis.close();
+                } catch (final IOException inner) {
+                    ioe.addSuppressed(inner);
+                }
+            }
+
+            throw ioe;
         }
     }
 

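The fallback logic above probes a small, fixed set of names when the requested journal has been renamed by rollover or compression. The hypothetical helper below (candidateFiles is not part of the codebase) simply restates that probe order for illustration.

import java.io.File;
import java.util.ArrayList;
import java.util.List;

public class JournalNameFallback {

    /**
     * The files that the factory above will try, in order: the requested file itself,
     * then the same base name with ".prov.gz" and ".prov" in the same directory.
     */
    static List<File> candidateFiles(final File requested) {
        final List<File> candidates = new ArrayList<>();
        candidates.add(requested);

        final File dir = requested.getParentFile();
        final String baseName = substringBefore(requested.getName(), ".");
        for (final String extension : new String[] {".prov.gz", ".prov"}) {
            candidates.add(new File(dir, baseName + extension));
        }
        return candidates;
    }

    // Local stand-in for the LuceneUtil.substringBefore call used by the factory.
    private static String substringBefore(final String value, final String delimiter) {
        final int index = value.indexOf(delimiter);
        return index < 0 ? value : value.substring(0, index);
    }
}
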
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriter.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriter.java
index 58f4dc2..d89fd6f 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriter.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriter.java
@@ -28,31 +28,27 @@ public interface RecordWriter extends Closeable {
     /**
      * Writes header information to the underlying stream
      *
-     * @throws IOException
+     * @throws IOException if unable to write header information to the underlying stream
      */
     void writeHeader() throws IOException;
 
     /**
      * Writes the given record out to the underlying stream
      *
-     * @param record
-     * @param recordIdentifier
+     * @param record the record to write
+     * @param recordIdentifier the new identifier of the record
      * @return the number of bytes written for the given record
-     * @throws IOException
+     * @throws IOException if unable to write the record to the stream
      */
     long writeRecord(ProvenanceEventRecord record, long recordIdentifier) throws IOException;
 
     /**
-     * Returns the number of Records that have been written to this RecordWriter
-     *
-     * @return
+     * @return the number of Records that have been written to this RecordWriter
      */
     int getRecordsWritten();
 
     /**
-     * Returns the file that this RecordWriter is writing to
-     *
-     * @return
+     * @return the file that this RecordWriter is writing to
      */
     File getFile();
 
@@ -73,19 +69,18 @@ public interface RecordWriter extends Closeable {
      * not immediately available, returns <code>false</code>; otherwise, obtains
      * the lock and returns <code>true</code>.
      *
-     * @return
+     * @return <code>true</code> if the lock was obtained, <code>false</code> otherwise.
      */
     boolean tryLock();
 
     /**
      * Syncs the content written to this writer to disk.
-     * @throws java.io.IOException
+     * @throws IOException if unable to sync content to disk
      */
     void sync() throws IOException;
 
     /**
-     * Returns the TOC Writer that is being used to write the Table of Contents for this journal
-     * @return
+     * @return the TOC Writer that is being used to write the Table of Contents for this journal
      */
     TocWriter getTocWriter();
 }

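As a usage illustration of the writer contract above, here is a minimal sketch that is not part of this commit. It only uses calls that appear in the interface (writeHeader, writeRecord, sync, getRecordsWritten, and close via Closeable) plus the RecordWriters factory from the next diff, and it assumes the caller already has a ProvenanceEventRecord to persist.

import java.io.File;
import java.io.IOException;

import org.apache.nifi.provenance.ProvenanceEventRecord;
import org.apache.nifi.provenance.serialization.RecordWriter;
import org.apache.nifi.provenance.serialization.RecordWriters;

public class WriteProvenanceJournal {

    /** Writes a single event to the given journal file and returns the number of bytes written. */
    static long writeOne(final File journal, final ProvenanceEventRecord event, final long eventId) throws IOException {
        // compressed = true, createToc = true: the writer also maintains a Table of Contents,
        // which is what later allows a RecordReader to skip directly to a compression block.
        final RecordWriter writer = RecordWriters.newRecordWriter(journal, true, true);
        try {
            writer.writeHeader();
            final long bytesWritten = writer.writeRecord(event, eventId);
            writer.sync();  // flush the written content to disk before handing the file off

            assert writer.getRecordsWritten() == 1;
            return bytesWritten;
        } finally {
            writer.close();  // RecordWriter extends Closeable
        }
    }
}
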
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/3cd18b0b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriters.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriters.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriters.java
index 47b7c7e..cf8f7b4 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriters.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/serialization/RecordWriters.java
@@ -25,14 +25,14 @@ import org.apache.nifi.provenance.toc.TocUtil;
 import org.apache.nifi.provenance.toc.TocWriter;
 
 public class RecordWriters {
-	private static final int DEFAULT_COMPRESSION_BLOCK_SIZE = 1024 * 1024;	// 1 MB
+    private static final int DEFAULT_COMPRESSION_BLOCK_SIZE = 1024 * 1024; // 1 MB
 
     public static RecordWriter newRecordWriter(final File file, final boolean compressed, final boolean createToc) throws IOException {
-    	return newRecordWriter(file, compressed, createToc, DEFAULT_COMPRESSION_BLOCK_SIZE);
+        return newRecordWriter(file, compressed, createToc, DEFAULT_COMPRESSION_BLOCK_SIZE);
     }
-    
+
     public static RecordWriter newRecordWriter(final File file, final boolean compressed, final boolean createToc, final int compressionBlockBytes) throws IOException {
-    	final TocWriter tocWriter = createToc ? new StandardTocWriter(TocUtil.getTocFile(file), false, false) : null;
+        final TocWriter tocWriter = createToc ? new StandardTocWriter(TocUtil.getTocFile(file), false, false) : null;
         return new StandardRecordWriter(file, tocWriter, compressed, compressionBlockBytes);
     }
 

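A short hedged note on the factory above: the three-argument overload applies the 1 MB default compression block size, while the four-argument overload lets a caller choose a different size, for example to force many Table of Contents entries in tests. The sketch below is illustration only, not part of this commit.

import java.io.File;
import java.io.IOException;

import org.apache.nifi.provenance.serialization.RecordWriter;
import org.apache.nifi.provenance.serialization.RecordWriters;

public class WriterFactoryChoice {

    /** Opens a compressed writer with a Table of Contents; the block size is configurable for illustration. */
    static RecordWriter openWriter(final File journal, final Integer compressionBlockBytes) throws IOException {
        if (compressionBlockBytes == null) {
            return RecordWriters.newRecordWriter(journal, true, true);  // 1 MB default blocks
        }
        return RecordWriters.newRecordWriter(journal, true, true, compressionBlockBytes);
    }
}
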

[33/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/a52cf529
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/a52cf529
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/a52cf529

Branch: refs/heads/NIFI-292
Commit: a52cf52948d3774f07f8713c5010ee4d7d768a75
Parents: 1eb4387
Author: joewitt <jo...@apache.org>
Authored: Mon Apr 27 21:34:55 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Mon Apr 27 21:34:55 2015 -0400

----------------------------------------------------------------------
 .../nifi/web/api/dto/BulletinBoardDTO.java      |  3 +-
 .../apache/nifi/web/api/dto/BulletinDTO.java    |  7 +--
 .../nifi/web/api/dto/BulletinQueryDTO.java      |  3 +-
 .../apache/nifi/web/api/dto/ConnectableDTO.java |  6 +--
 .../apache/nifi/web/api/dto/ConnectionDTO.java  | 25 ++++------
 .../web/api/dto/ControllerConfigurationDTO.java |  6 +--
 .../apache/nifi/web/api/dto/ControllerDTO.java  | 17 ++-----
 .../nifi/web/api/dto/ControllerServiceDTO.java  | 10 ++--
 ...ontrollerServiceReferencingComponentDTO.java | 19 +++-----
 .../org/apache/nifi/web/api/dto/CounterDTO.java |  3 +-
 .../org/apache/nifi/web/api/dto/LabelDTO.java   |  1 -
 .../nifi/web/api/dto/NiFiComponentDTO.java      |  3 +-
 .../org/apache/nifi/web/api/dto/PortDTO.java    | 10 ++--
 .../nifi/web/api/dto/ProcessGroupDTO.java       |  3 +-
 .../nifi/web/api/dto/ProcessorConfigDTO.java    | 32 ++++---------
 .../apache/nifi/web/api/dto/ProcessorDTO.java   | 10 ++--
 .../nifi/web/api/dto/PropertyDescriptorDTO.java | 22 +++------
 .../nifi/web/api/dto/RemoteProcessGroupDTO.java | 15 ++----
 .../web/api/dto/RemoteProcessGroupPortDTO.java  |  3 +-
 .../nifi/web/api/dto/ReportingTaskDTO.java      | 19 +++-----
 .../apache/nifi/web/api/dto/RevisionDTO.java    | 11 ++---
 .../org/apache/nifi/web/api/dto/SnippetDTO.java | 48 +++++++-------------
 .../org/apache/nifi/web/api/dto/UserDTO.java    |  3 +-
 .../web/api/dto/provenance/ProvenanceDTO.java   |  3 +-
 .../api/dto/provenance/ProvenanceEventDTO.java  | 35 +++++---------
 .../provenance/lineage/LineageRequestDTO.java   |  7 +--
 .../provenance/lineage/ProvenanceNodeDTO.java   |  3 +-
 .../web/api/dto/status/ControllerStatusDTO.java |  3 +-
 .../nifi/web/api/dto/status/PortStatusDTO.java  |  6 +--
 .../api/dto/status/ProcessGroupStatusDTO.java   |  9 ++--
 .../web/api/dto/status/ProcessorStatusDTO.java  |  9 ++--
 .../dto/status/RemoteProcessGroupStatusDTO.java |  6 +--
 .../apache/nifi/web/api/entity/AboutEntity.java |  4 +-
 .../nifi/web/api/entity/ActionEntity.java       |  4 +-
 .../nifi/web/api/entity/AuthorityEntity.java    |  4 +-
 .../nifi/web/api/entity/BannerEntity.java       |  4 +-
 .../web/api/entity/BulletinBoardEntity.java     |  4 +-
 .../entity/ClusterConnectionStatusEntity.java   |  4 +-
 .../nifi/web/api/entity/ClusterEntity.java      |  4 +-
 .../web/api/entity/ClusterPortStatusEntity.java |  4 +-
 .../entity/ClusterProcessGroupStatusEntity.java |  4 +-
 .../entity/ClusterProcessorStatusEntity.java    |  4 +-
 .../ClusterRemoteProcessGroupStatusEntity.java  |  5 +-
 .../api/entity/ClusterSearchResultsEntity.java  |  4 +-
 .../web/api/entity/ClusterStatusEntity.java     |  4 +-
 .../api/entity/ClusterStatusHistoryEntity.java  |  4 +-
 .../web/api/entity/ComponentHistoryEntity.java  |  4 +-
 .../nifi/web/api/entity/ConnectionEntity.java   |  4 +-
 .../nifi/web/api/entity/ConnectionsEntity.java  |  4 +-
 .../entity/ControllerConfigurationEntity.java   |  4 +-
 .../nifi/web/api/entity/ControllerEntity.java   |  4 +-
 .../web/api/entity/ControllerServiceEntity.java |  4 +-
 ...ollerServiceReferencingComponentsEntity.java |  8 ++--
 .../entity/ControllerServiceTypesEntity.java    |  4 +-
 .../api/entity/ControllerServicesEntity.java    |  4 +-
 .../web/api/entity/ControllerStatusEntity.java  |  4 +-
 .../nifi/web/api/entity/CounterEntity.java      |  4 +-
 .../nifi/web/api/entity/CountersEntity.java     |  7 +--
 .../nifi/web/api/entity/FlowSnippetEntity.java  |  4 +-
 .../nifi/web/api/entity/FunnelEntity.java       |  4 +-
 .../nifi/web/api/entity/FunnelsEntity.java      |  4 +-
 .../nifi/web/api/entity/HistoryEntity.java      |  4 +-
 .../nifi/web/api/entity/InputPortEntity.java    |  4 +-
 .../nifi/web/api/entity/InputPortsEntity.java   |  4 +-
 .../apache/nifi/web/api/entity/LabelEntity.java |  4 +-
 .../nifi/web/api/entity/LabelsEntity.java       |  4 +-
 .../nifi/web/api/entity/LineageEntity.java      |  4 +-
 .../apache/nifi/web/api/entity/NodeEntity.java  |  4 +-
 .../nifi/web/api/entity/NodeStatusEntity.java   |  4 +-
 .../api/entity/NodeSystemDiagnosticsEntity.java |  4 +-
 .../nifi/web/api/entity/OutputPortEntity.java   |  4 +-
 .../nifi/web/api/entity/OutputPortsEntity.java  |  4 +-
 .../web/api/entity/PrioritizerTypesEntity.java  |  4 +-
 .../nifi/web/api/entity/ProcessGroupEntity.java |  4 +-
 .../api/entity/ProcessGroupStatusEntity.java    |  4 +-
 .../web/api/entity/ProcessGroupsEntity.java     |  4 +-
 .../nifi/web/api/entity/ProcessorEntity.java    |  4 +-
 .../web/api/entity/ProcessorTypesEntity.java    |  4 +-
 .../nifi/web/api/entity/ProcessorsEntity.java   |  4 +-
 .../api/entity/PropertyDescriptorEntity.java    |  4 +-
 .../web/api/entity/ProvenanceEventEntity.java   |  4 +-
 .../web/api/entity/ProvenanceOptionsEntity.java |  4 +-
 .../api/entity/RemoteProcessGroupEntity.java    |  4 +-
 .../entity/RemoteProcessGroupPortEntity.java    |  4 +-
 .../api/entity/RemoteProcessGroupsEntity.java   |  4 +-
 .../web/api/entity/ReportingTaskEntity.java     |  4 +-
 .../api/entity/ReportingTaskTypesEntity.java    |  4 +-
 .../web/api/entity/ReportingTasksEntity.java    |  4 +-
 .../web/api/entity/SearchResultsEntity.java     |  6 +--
 .../nifi/web/api/entity/SnippetEntity.java      |  4 +-
 .../web/api/entity/StatusHistoryEntity.java     |  4 +-
 .../web/api/entity/SystemDiagnosticsEntity.java |  4 +-
 .../nifi/web/api/entity/TemplateEntity.java     |  4 +-
 .../nifi/web/api/entity/TemplatesEntity.java    |  4 +-
 .../apache/nifi/web/api/entity/UserEntity.java  |  4 +-
 .../nifi/web/api/entity/UserGroupEntity.java    |  4 +-
 .../web/api/entity/UserSearchResultsEntity.java |  5 +-
 .../apache/nifi/web/api/entity/UsersEntity.java |  4 +-
 98 files changed, 187 insertions(+), 448 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinBoardDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinBoardDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinBoardDTO.java
index a71484e..5e9440d 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinBoardDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinBoardDTO.java
@@ -24,8 +24,7 @@ import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
 import org.apache.nifi.web.api.dto.util.TimeAdapter;
 
 /**
- * The contents for the bulletin board including the bulletins and the timestamp
- * when the board was generated.
+ * The contents for the bulletin board including the bulletins and the timestamp when the board was generated.
  */
 @XmlType(name = "bulletinBoard")
 public class BulletinBoardDTO {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinDTO.java
index 239e710..7ae77bb 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinDTO.java
@@ -22,9 +22,7 @@ import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
 import org.apache.nifi.web.api.dto.util.TimeAdapter;
 
 /**
- * A bulletin that represents a notification about a passing event including,
- * the source component (if applicable), the timestamp, the message, and where
- * the bulletin originated (if applicable).
+ * A bulletin that represents a notification about a passing event, including the source component (if applicable), the timestamp, the message, and where the bulletin originated (if applicable).
  */
 @XmlType(name = "bulletin")
 public class BulletinDTO {
@@ -51,8 +49,7 @@ public class BulletinDTO {
     }
 
     /**
-     * @return When clustered, the address of the node from which this bulletin
-     * originated
+     * @return When clustered, the address of the node from which this bulletin originated
      */
     public String getNodeAddress() {
         return nodeAddress;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinQueryDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinQueryDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinQueryDTO.java
index cf4146d..4b060f1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinQueryDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/BulletinQueryDTO.java
@@ -19,8 +19,7 @@ package org.apache.nifi.web.api.dto;
 import javax.xml.bind.annotation.XmlType;
 
 /**
- * A query for bulletin board. Will filter the resulting bulletin board
- * according to the criteria in this query.
+ * A query for bulletin board. Will filter the resulting bulletin board according to the criteria in this query.
  */
 @XmlType(name = "bulletinQuery")
 public class BulletinQueryDTO {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectableDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectableDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectableDTO.java
index 199c73e..7432a72 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectableDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectableDTO.java
@@ -89,8 +89,7 @@ public class ConnectableDTO {
     }
 
     /**
-     * @return If this represents a remote port it is used to indicate whether the
-     * target exists
+     * @return If this represents a remote port, it is used to indicate whether the target exists
      */
     public Boolean getExists() {
         return exists;
@@ -101,8 +100,7 @@ public class ConnectableDTO {
     }
 
     /**
-     * @return If this represents a remote port it is used to indicate whether is it
-     * configured to transmit
+     * @return If this represents a remote port, it is used to indicate whether it is configured to transmit
      */
     public Boolean getTransmitting() {
         return transmitting;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectionDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectionDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectionDTO.java
index 1bd382e..432f80a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectionDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ConnectionDTO.java
@@ -89,8 +89,7 @@ public class ConnectionDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return The index of control point that the connection label should be placed
-     * over
+     * @return The index of the control point that the connection label should be placed over
      */
     public Integer getLabelIndex() {
         return labelIndex;
@@ -125,8 +124,7 @@ public class ConnectionDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return relationships that the source of the connection currently supports.
-     * This property is read only
+     * @return relationships that the source of the connection currently supports. This property is read only
      */
     public Set<String> getAvailableRelationships() {
         return availableRelationships;
@@ -137,10 +135,8 @@ public class ConnectionDTO extends NiFiComponentDTO {
     }
 
     /**
-     * The object count threshold for determining when back pressure is applied.
-     * Updating this value is a passive change in the sense that it won't impact
-     * whether existing files over the limit are affected but it does help
-     * feeder processors to stop pushing too much into this work queue.
+     * The object count threshold for determining when back pressure is applied. Updating this value is a passive change in the sense that it will not affect files already queued over the limit,
+     * but it does help feeder processors to stop pushing too much into this work queue.
      *
      * @return The back pressure object threshold
      */
@@ -153,11 +149,8 @@ public class ConnectionDTO extends NiFiComponentDTO {
     }
 
     /**
-     * The object data size threshold for determining when back pressure is
-     * applied. Updating this value is a passive change in the sense that it
-     * won't impact whether existing files over the limit are affected but it
-     * does help feeder processors to stop pushing too much into this work
-     * queue.
+     * The object data size threshold for determining when back pressure is applied. Updating this value is a passive change in the sense that it will not affect files already queued over the limit,
+     * but it does help feeder processors to stop pushing too much into this work queue.
      *
      * @return The back pressure data size threshold
      */
@@ -170,10 +163,8 @@ public class ConnectionDTO extends NiFiComponentDTO {
     }
 
     /**
-     * The amount of time a flow file may be in the flow before it will be
-     * automatically aged out of the flow. Once a flow file reaches this age it
-     * will be terminated from the flow the next time a processor attempts to
-     * start work on it.
+     * The amount of time a flow file may be in the flow before it will be automatically aged out of the flow. Once a flow file reaches this age it will be terminated from the flow the next time a
+     * processor attempts to start work on it.
      *
      * @return The flow file expiration in minutes
      */

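The back-pressure and expiration descriptions above are about configuration semantics, so a short sketch may help. It is not from this commit, and the setter names and value formats below are assumptions that simply mirror the getters documented here.

import org.apache.nifi.web.api.dto.ConnectionDTO;

public class ConnectionBackPressureSketch {

    public static void main(final String[] args) {
        final ConnectionDTO connection = new ConnectionDTO();

        // Back pressure: once the queue holds this many FlowFiles or this much data,
        // feeder processors stop being scheduled. Changing the values is passive and
        // does not remove FlowFiles that are already queued over the limit.
        connection.setBackPressureObjectThreshold(10000L);      // assumed setter and type
        connection.setBackPressureDataSizeThreshold("1 GB");    // assumed setter and format

        // Expiration: FlowFiles older than this are terminated the next time a
        // processor attempts to start work on them.
        connection.setFlowFileExpiration("60 min");             // assumed setter and format
    }
}
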
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerConfigurationDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerConfigurationDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerConfigurationDTO.java
index 8e09fe7..c6f36f3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerConfigurationDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerConfigurationDTO.java
@@ -86,8 +86,7 @@ public class ControllerConfigurationDTO {
     }
 
     /**
-     * @return interval in seconds between the automatic NiFi refresh requests. This
-     * value is read only
+     * @return interval in seconds between the automatic NiFi refresh requests. This value is read only
      */
     public Long getAutoRefreshIntervalSeconds() {
         return autoRefreshIntervalSeconds;
@@ -98,8 +97,7 @@ public class ControllerConfigurationDTO {
     }
 
     /**
-     * @return Indicates whether or not Site-to-Site communications with this instance
-     * is secure (2-way authentication). This value is read only
+     * @return Indicates whether or not Site-to-Site communications with this instance are secure (2-way authentication). This value is read only
      */
     public Boolean isSiteToSiteSecure() {
         return siteToSiteSecure;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerDTO.java
index c5ee057..34008e0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerDTO.java
@@ -20,9 +20,7 @@ import java.util.Set;
 import javax.xml.bind.annotation.XmlType;
 
 /**
- * Configuration details for a NiFi controller. Primary use of this DTO is for
- * consumption by a remote NiFi instance to initiate site to site
- * communications.
+ * Configuration details for a NiFi controller. Primary use of this DTO is for consumption by a remote NiFi instance to initiate site to site communications.
  */
 @XmlType(name = "controller")
 public class ControllerDTO {
@@ -105,8 +103,7 @@ public class ControllerDTO {
     }
 
     /**
-     * @return Instance ID of the cluster, if this node is connected to a Cluster
-     * Manager, or of this individual instance of in standalone mode
+     * @return Instance ID of the cluster, if this node is connected to a Cluster Manager, or of this individual instance if in standalone mode
      */
     public String getInstanceId() {
         return instanceId;
@@ -117,12 +114,9 @@ public class ControllerDTO {
     }
 
     /**
-     * The Socket Port on which this instance is listening for Remote Transfers
-     * of Flow Files. If this instance is not configured to receive Flow Files
-     * from remote instances, this will be null.
+     * The Socket Port on which this instance is listening for Remote Transfers of Flow Files. If this instance is not configured to receive Flow Files from remote instances, this will be null.
      *
-     * @return a integer between 1 and 65535, or null, if not configured for
-     * remote transfer
+     * @return an integer between 1 and 65535, or null, if not configured for remote transfer
      */
     public Integer getRemoteSiteListeningPort() {
         return remoteSiteListeningPort;
@@ -133,8 +127,7 @@ public class ControllerDTO {
     }
 
     /**
-     * @return Indicates whether or not Site-to-Site communications with this instance
-     * is secure (2-way authentication)
+     * @return Indicates whether or not Site-to-Site communications with this instance are secure (2-way authentication)
      */
     public Boolean isSiteToSiteSecure() {
         return siteToSiteSecure;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceDTO.java
index 02ba2e2..8394705 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceDTO.java
@@ -88,8 +88,7 @@ public class ControllerServiceDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return The state of this controller service. Possible values are ENABLED,
-     * ENABLING, DISABLED, DISABLING
+     * @return The state of this controller service. Possible values are ENABLED, ENABLING, DISABLED, DISABLING
      */
     public String getState() {
         return state;
@@ -122,8 +121,7 @@ public class ControllerServiceDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return the URL for this controller services custom configuration UI if
-     * applicable. Null otherwise
+     * @return the URL for this controller service's custom configuration UI if applicable. Null otherwise
      */
     public String getCustomUiUrl() {
         return customUiUrl;
@@ -156,9 +154,7 @@ public class ControllerServiceDTO extends NiFiComponentDTO {
     }
 
     /**
-     * Gets the validation errors from this controller service. These validation
-     * errors represent the problems with the controller service that must be
-     * resolved before it can be enabled.
+     * Gets the validation errors from this controller service. These validation errors represent the problems with the controller service that must be resolved before it can be enabled.
      *
      * @return The validation errors
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceReferencingComponentDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceReferencingComponentDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceReferencingComponentDTO.java
index 4b557e1..f927122 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceReferencingComponentDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ControllerServiceReferencingComponentDTO.java
@@ -22,9 +22,7 @@ import java.util.Set;
 import javax.xml.bind.annotation.XmlType;
 
 /**
- * A component referencing a controller service. This can either be another
- * controller service or a processor. Depending on the type of component
- * different properties may be set.
+ * A component referencing a controller service. This can either be another controller service or a processor. Depending on the type of component, different properties may be set.
  */
 @XmlType(name = "controllerServiceReferencingComponent")
 public class ControllerServiceReferencingComponentDTO {
@@ -47,8 +45,7 @@ public class ControllerServiceReferencingComponentDTO {
     private Set<ControllerServiceReferencingComponentDTO> referencingComponents;
 
     /**
-     * @return Group id for this component referencing a controller service. If this
-     * component is another service, this field is blank
+     * @return Group id for this component referencing a controller service. If this component is another service, this field is blank
      */
     public String getGroupId() {
         return groupId;
@@ -92,8 +89,7 @@ public class ControllerServiceReferencingComponentDTO {
     }
 
     /**
-     * @return state of the processor referencing a controller service. If this
-     * component is another service, this field is blank
+     * @return state of the processor referencing a controller service. If this component is another service, this field is blank
      */
     public String getState() {
         return state;
@@ -104,8 +100,7 @@ public class ControllerServiceReferencingComponentDTO {
     }
 
     /**
-     * @return type of reference this is (Processor, ControllerService, or
-     * ReportingTask)
+     * @return type of reference this is (Processor, ControllerService, or ReportingTask)
      */
     public String getReferenceType() {
         return referenceType;
@@ -160,8 +155,7 @@ public class ControllerServiceReferencingComponentDTO {
     }
 
     /**
-     * @return If this referencing component represents a ControllerService, these are
-     * the components that reference it
+     * @return If this referencing component represents a ControllerService, these are the components that reference it
      */
     public Set<ControllerServiceReferencingComponentDTO> getReferencingComponents() {
         return referencingComponents;
@@ -172,8 +166,7 @@ public class ControllerServiceReferencingComponentDTO {
     }
 
     /**
-     * @return If this referencing component represents a ControllerService, this
-     * indicates whether it has already been represented in this hierarchy
+     * @return If this referencing component represents a ControllerService, this indicates whether it has already been represented in this hierarchy
      */
     public Boolean getReferenceCycle() {
         return referenceCycle;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/CounterDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/CounterDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/CounterDTO.java
index 2df4dd4..7f47bf5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/CounterDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/CounterDTO.java
@@ -19,8 +19,7 @@ package org.apache.nifi.web.api.dto;
 import javax.xml.bind.annotation.XmlType;
 
 /**
- * Counter value for a specific component in a specific context. A counter is a
- * value that a component can adjust during processing.
+ * Counter value for a specific component in a specific context. A counter is a value that a component can adjust during processing.
  */
 @XmlType(name = "counter")
 public class CounterDTO {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/LabelDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/LabelDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/LabelDTO.java
index 9339a07..d15ceb4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/LabelDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/LabelDTO.java
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.web.api.dto;
 
-import java.util.Collections;
 import java.util.Map;
 
 import javax.xml.bind.annotation.XmlType;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/NiFiComponentDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/NiFiComponentDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/NiFiComponentDTO.java
index 1ed0676..074a2e3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/NiFiComponentDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/NiFiComponentDTO.java
@@ -55,8 +55,7 @@ public class NiFiComponentDTO {
     }
 
     /**
-     * @return id for the parent group of this component if applicable, null
-     * otherwise
+     * @return id for the parent group of this component if applicable, null otherwise
      */
     public String getParentGroupId() {
         return parentGroupId;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PortDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PortDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PortDTO.java
index 464beaf..6a90723 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PortDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PortDTO.java
@@ -49,8 +49,7 @@ public class PortDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return The state of this port. Possible states are 'RUNNING', 'STOPPED', and
-     * 'DISABLED'
+     * @return The state of this port. Possible states are 'RUNNING', 'STOPPED', and 'DISABLED'
      */
     public String getState() {
         return state;
@@ -96,8 +95,7 @@ public class PortDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return whether this port has incoming or outgoing connections to a remote NiFi.
-     * This is only applicable when the port is running on the root group
+     * @return whether this port has incoming or outgoing connections to a remote NiFi. This is only applicable when the port is running on the root group
      */
     public Boolean isTransmitting() {
         return transmitting;
@@ -130,9 +128,7 @@ public class PortDTO extends NiFiComponentDTO {
     }
 
     /**
-     * Gets the validation errors from this port. These validation errors
-     * represent the problems with the port that must be resolved before it can
-     * be started.
+     * Gets the validation errors from this port. These validation errors represent the problems with the port that must be resolved before it can be started.
      *
      * @return The validation errors
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessGroupDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessGroupDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessGroupDTO.java
index 870c32a..f9a6551 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessGroupDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessGroupDTO.java
@@ -84,8 +84,7 @@ public class ProcessGroupDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return contents of this process group. This field will be populated if the
-     * request is marked verbose
+     * @return contents of this process group. This field will be populated if the request is marked verbose
      */
     public FlowSnippetDTO getContents() {
         return contents;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java
index b507033..1832ce3 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorConfigDTO.java
@@ -54,8 +54,7 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * The frequency with which to schedule the processor. The format of the
-     * value will depend on the value of {@link #getSchedulingStrategy()}.
+     * The frequency with which to schedule the processor. The format of the value will depend on the value of {@link #getSchedulingStrategy()}.
      *
      * @return The scheduling period
      */
@@ -68,8 +67,7 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * Indicates whether the processor should be scheduled to run in
-     * event-driven mode or timer-driven mode
+     * Indicates whether the processor should be scheduled to run in event-driven mode or timer-driven mode
      *
      * @return scheduling strategy
      */
@@ -93,8 +91,7 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * @return amount of time must elaspe before this processor is
-     * scheduled again when yielding
+     * @return amount of time that must elapse before this processor is scheduled again when yielding
      */
     public String getYieldDuration() {
         return yieldDuration;
@@ -116,9 +113,7 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * The number of tasks that should be concurrently scheduled for this
-     * processor. If this processor doesn't allow parallel processing then any
-     * positive input will be ignored.
+     * The number of tasks that should be concurrently scheduled for this processor. If this processor doesn't allow parallel processing then any positive input will be ignored.
      *
      * @return the concurrently schedulable task count
      */
@@ -153,11 +148,8 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * The properties for this processor. Properties whose value is not set will
-     * only contain the property name. These properties are (un)marshalled
-     * differently since we need/want to control the ordering of the properties.
-     * The descriptors and metadata are used as a lookup when processing these
-     * properties.
+     * The properties for this processor. Properties whose value is not set will only contain the property name. These properties are (un)marshalled differently since we need/want to control the
+     * ordering of the properties. The descriptors and metadata are used as a lookup when processing these properties.
      *
      * @return The optional properties
      */
@@ -194,8 +186,7 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * @return the URL for this processors custom configuration UI if
-     * applicable. Null otherwise.
+     * @return the URL for this processor's custom configuration UI if applicable. Null otherwise.
      */
     public String getCustomUiUrl() {
         return customUiUrl;
@@ -206,8 +197,7 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * @return the names of all processor relationships that cause a flow file to be
-     * terminated if the relationship is not connected to anything
+     * @return the names of all processor relationships that cause a flow file to be terminated if the relationship is not connected to anything
      */
     public Set<String> getAutoTerminatedRelationships() {
         return autoTerminatedRelationships;
@@ -218,8 +208,7 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * @return maps default values for concurrent tasks for each applicable scheduling
-     * strategy.
+     * @return maps default values for concurrent tasks for each applicable scheduling strategy.
      */
     public Map<String, String> getDefaultConcurrentTasks() {
         return defaultConcurrentTasks;
@@ -241,8 +230,7 @@ public class ProcessorConfigDTO {
     }
 
     /**
-     * @return Maps default values for scheduling period for each applicable scheduling
-     * strategy
+     * @return Maps default values for scheduling period for each applicable scheduling strategy
      */
     public Map<String, String> getDefaultSchedulingPeriod() {
         return defaultSchedulingPeriod;

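Likewise, the scheduling documentation above describes how this DTO is meant to be filled in. The sketch below is not from this commit, and every setter name and value shown is an assumption mirroring the getters documented here.

import java.util.Collections;

import org.apache.nifi.web.api.dto.ProcessorConfigDTO;

public class ProcessorSchedulingSketch {

    public static void main(final String[] args) {
        final ProcessorConfigDTO config = new ProcessorConfigDTO();

        // The scheduling period's format depends on the strategy: a time period such as
        // "5 sec" for timer-driven scheduling, or a cron expression for cron-driven scheduling.
        config.setSchedulingStrategy("TIMER_DRIVEN");   // assumed setter and value
        config.setSchedulingPeriod("5 sec");            // assumed setter

        // Two tasks may run concurrently; ignored if the processor forbids parallel execution.
        config.setConcurrentlySchedulableTaskCount(2);  // assumed setter

        // Relationships named here auto-terminate FlowFiles when not connected to anything.
        config.setAutoTerminatedRelationships(Collections.singleton("failure"));  // assumed setter
    }
}
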
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
index cda26cd..b80db70 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
@@ -71,8 +71,7 @@ public class ProcessorDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return The state of this processor. Possible states are 'RUNNING', 'STOPPED',
-     * and 'DISABLED'
+     * @return The state of this processor. Possible states are 'RUNNING', 'STOPPED', and 'DISABLED'
      */
     public String getState() {
         return state;
@@ -129,8 +128,7 @@ public class ProcessorDTO extends NiFiComponentDTO {
     }
 
     /**
-     * The configuration details for this processor. These details will be
-     * included in a response if the verbose flag is set to true.
+     * The configuration details for this processor. These details will be included in a response if the verbose flag is set to true.
      *
      * @return The processor configuration details
      */
@@ -143,9 +141,7 @@ public class ProcessorDTO extends NiFiComponentDTO {
     }
 
     /**
-     * Gets the validation errors from this processor. These validation errors
-     * represent the problems with the processor that must be resolved before it
-     * can be started.
+     * Gets the validation errors from this processor. These validation errors represent the problems with the processor that must be resolved before it can be started.
      *
      * @return The validation errors
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PropertyDescriptorDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PropertyDescriptorDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PropertyDescriptorDTO.java
index af3de2b..02a55a7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PropertyDescriptorDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/PropertyDescriptorDTO.java
@@ -37,8 +37,7 @@ public class PropertyDescriptorDTO {
     private String identifiesControllerService;
 
     /**
-     * @return set of allowable values for this property. If empty then the
-     * allowable values are not constrained
+     * @return set of allowable values for this property. If empty then the allowable values are not constrained
      */
     public List<AllowableValueDTO> getAllowableValues() {
         return allowableValues;
@@ -60,9 +59,7 @@ public class PropertyDescriptorDTO {
     }
 
     /**
-     * @return An explanation of the meaning of the given property. This
-     * description is meant to be displayed to a user or simply provide a
-     * mechanism of documenting intent
+     * @return An explanation of the meaning of the given property. This description is meant to be displayed to a user or simply provide a mechanism of documenting intent
      */
     public String getDescription() {
         return description;
@@ -106,8 +103,7 @@ public class PropertyDescriptorDTO {
     }
 
     /**
-     * @return indicates that the value for this property should be considered
-     * sensitive and protected whenever stored or represented
+     * @return indicates that the value for this property should be considered sensitive and protected whenever stored or represented
      */
     public boolean isSensitive() {
         return sensitive;
@@ -129,8 +125,7 @@ public class PropertyDescriptorDTO {
     }
 
     /**
-     * @return specifies whether or not this property support expression
-     * language
+     * @return specifies whether or not this property supports expression language
      */
     public boolean getSupportsEl() {
         return supportsEl;
@@ -141,8 +136,7 @@ public class PropertyDescriptorDTO {
     }
 
     /**
-     * @return if this property identifies a controller service, this returns
-     * the fully qualified type, null otherwise
+     * @return if this property identifies a controller service, this returns the fully qualified type, null otherwise
      */
     public String getIdentifiesControllerService() {
         return identifiesControllerService;
@@ -163,8 +157,7 @@ public class PropertyDescriptorDTO {
         private String description;
 
         /**
-         * @return the human-readable value that is allowed for this
-         * PropertyDescriptor
+         * @return the human-readable value that is allowed for this PropertyDescriptor
          */
         public String getDisplayName() {
             return displayName;
@@ -186,8 +179,7 @@ public class PropertyDescriptorDTO {
         }
 
         /**
-         * @return a description of this Allowable Value, or <code>null</code>
-         * if no description is given
+         * @return a description of this Allowable Value, or <code>null</code> if no description is given
          */
         public String getDescription() {
             return description;
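
Taken together, these flags drive how a client renders a property: sensitive values are masked, a non-empty set of allowable values becomes a constrained choice, and expression language support changes how the value is edited. A rough sketch of that decision logic, using only the getters documented above and assuming AllowableValueDTO is the nested class shown here (the widget names returned are purely illustrative):

    import java.util.List;

    import org.apache.nifi.web.api.dto.PropertyDescriptorDTO;
    import org.apache.nifi.web.api.dto.PropertyDescriptorDTO.AllowableValueDTO;

    public class PropertyFieldRenderer {

        /** Chooses a widget type for a property based on its descriptor. */
        public static String widgetFor(final PropertyDescriptorDTO descriptor) {
            if (descriptor.isSensitive()) {
                return "password-field";   // value is protected wherever stored or represented
            }
            final List<AllowableValueDTO> allowed = descriptor.getAllowableValues();
            if (allowed != null && !allowed.isEmpty()) {
                return "dropdown";         // values are constrained to the allowable set
            }
            return descriptor.getSupportsEl() ? "expression-editor" : "text-field";
        }
    }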

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupDTO.java
index 22bbee0..b30320a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupDTO.java
@@ -129,8 +129,7 @@ public class RemoteProcessGroupDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return the time period used for the timeout when communicating with this
-     * RemoteProcessGroup
+     * @return the time period used for the timeout when communicating with this RemoteProcessGroup
      */
     public String getCommunicationsTimeout() {
         return communicationsTimeout;
@@ -141,8 +140,7 @@ public class RemoteProcessGroupDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return when yielding, this amount of time must elaspe before this remote process
-     * group is scheduled again
+     * @return when yielding, this amount of time must elapse before this remote process group is scheduled again
      */
     public String getYieldDuration() {
         return yieldDuration;
@@ -197,8 +195,7 @@ public class RemoteProcessGroupDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return number of Remote Input Ports currently available in the remote NiFi
-     * instance
+     * @return number of Remote Input Ports currently available in the remote NiFi instance
      */
     public Integer getInputPortCount() {
         return inputPortCount;
@@ -209,8 +206,7 @@ public class RemoteProcessGroupDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return number of Remote Output Ports currently available in the remote NiFi
-     * instance
+     * @return number of Remote Output Ports currently available in the remote NiFi instance
      */
     public Integer getOutputPortCount() {
         return outputPortCount;
@@ -221,8 +217,7 @@ public class RemoteProcessGroupDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return contents of this remote process group. Will contain available
-     * input/output ports
+     * @return contents of this remote process group. Will contain available input/output ports
      */
     public RemoteProcessGroupContentsDTO getContents() {
         return contents;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupPortDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupPortDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupPortDTO.java
index 71d0f66..07f8ced 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupPortDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RemoteProcessGroupPortDTO.java
@@ -47,8 +47,7 @@ public class RemoteProcessGroupPortDTO {
     }
 
     /**
-     * @return number tasks that may transmit flow files to the target port
-     * concurrently
+     * @return number of tasks that may transmit flow files to the target port concurrently
      */
     public Integer getConcurrentlySchedulableTaskCount() {
         return concurrentlySchedulableTaskCount;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ReportingTaskDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ReportingTaskDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ReportingTaskDTO.java
index 6b6558a..4abba4b 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ReportingTaskDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ReportingTaskDTO.java
@@ -22,8 +22,7 @@ import java.util.Map;
 import javax.xml.bind.annotation.XmlType;
 
 /**
- * Component that is capable of reporting internal NiFi state to an external
- * service
+ * Component that is capable of reporting internal NiFi state to an external service
  */
 @XmlType(name = "reportingTask")
 public class ReportingTaskDTO extends NiFiComponentDTO {
@@ -81,8 +80,7 @@ public class ReportingTaskDTO extends NiFiComponentDTO {
     }
 
     /**
-     * The frequency with which to schedule the reporting task. The format of
-     * the value will depend on the value of {@link #getSchedulingStrategy()}.
+     * The frequency with which to schedule the reporting task. The format of the value will depend on the value of {@link #getSchedulingStrategy()}.
      *
      * @return The scheduling period
      */
@@ -106,8 +104,7 @@ public class ReportingTaskDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return The scheduling strategy that determines how the
-     * {@link #getSchedulingPeriod()} value should be interpreted
+     * @return The scheduling strategy that determines how the {@link #getSchedulingPeriod()} value should be interpreted
      */
     public String getSchedulingStrategy() {
         return schedulingStrategy;
@@ -118,8 +115,7 @@ public class ReportingTaskDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return Where this service is available. Possible values are
-     * CLUSTER_MANAGER_ONLY, NODE_ONLY, BOTH
+     * @return Where this service is available. Possible values are CLUSTER_MANAGER_ONLY, NODE_ONLY, BOTH
      */
     public String getAvailability() {
         return availability;
@@ -152,8 +148,7 @@ public class ReportingTaskDTO extends NiFiComponentDTO {
     }
 
     /**
-     * @return the URL for this reporting task custom configuration UI if
-     * applicable. Null otherwise
+     * @return the URL for this reporting task custom configuration UI if applicable. Null otherwise
      */
     public String getCustomUiUrl() {
         return customUiUrl;
@@ -175,9 +170,7 @@ public class ReportingTaskDTO extends NiFiComponentDTO {
     }
 
     /**
-     * Gets the validation errors from this reporting task. These validation
-     * errors represent the problems with the reporting task that must be
-     * resolved before it can be scheduled to run.
+     * Gets the validation errors from this reporting task. These validation errors represent the problems with the reporting task that must be resolved before it can be scheduled to run.
      *
      * @return The validation errors
      */
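
The scheduling period and the scheduling strategy are meant to be read together: the strategy tells the framework how to interpret the period string. A small illustrative sketch, assuming both getters return strings and using strategy names only as examples:

    import org.apache.nifi.web.api.dto.ReportingTaskDTO;

    public class SchedulingSummary {

        /** Builds a human-readable description of how a reporting task is scheduled. */
        public static String describe(final ReportingTaskDTO task) {
            final String strategy = task.getSchedulingStrategy();
            final String period = task.getSchedulingPeriod();
            if ("CRON_DRIVEN".equals(strategy)) {
                return "runs on cron expression '" + period + "'";   // period holds a cron expression
            }
            return "runs every " + period;                           // period holds a time period, e.g. "5 mins"
        }
    }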

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RevisionDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RevisionDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RevisionDTO.java
index 7a60c6e..e8f4309 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RevisionDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RevisionDTO.java
@@ -30,11 +30,8 @@ public class RevisionDTO {
 
     /* getters / setters */
     /**
-     * A client identifier used to make a request. By including a client
-     * identifier, the API can allow multiple requests without needing the
-     * current revision. Due to the asynchronous nature of requests/responses
-     * this was implemented to allow the client to make numerous requests
-     * without having to wait for the previous response to come back.
+     * A client identifier used to make a request. By including a client identifier, the API can allow multiple requests without needing the current revision. Due to the asynchronous nature of
+     * requests/responses this was implemented to allow the client to make numerous requests without having to wait for the previous response to come back.
      *
      * @return The client id
      */
@@ -47,9 +44,7 @@ public class RevisionDTO {
     }
 
     /**
-     * NiFi employs an optimistic locking strategy where the client must include
-     * a revision in their request when performing an update. In a response,
-     * this field represents the updated base version.
+     * NiFi employs an optimistic locking strategy where the client must include a revision in their request when performing an update. In a response, this field represents the updated base version.
      *
      * @return The revision
      */
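
In practice the revision travels with every mutating request: the client echoes back the version it last saw, along with its client id, and the server rejects the update if that version is stale. A minimal sketch of preparing the revision for an update, assuming setter names that mirror the getters documented above:

    import org.apache.nifi.web.api.dto.RevisionDTO;

    public class RevisionExample {

        /**
         * Prepares the revision to accompany an update request. The version is the value
         * returned in the last response; the client id lets the same client issue further
         * requests without waiting for each response. (Setter names are assumed.)
         */
        public static RevisionDTO nextRevision(final Long lastSeenVersion, final String clientId) {
            final RevisionDTO revision = new RevisionDTO();
            revision.setVersion(lastSeenVersion);
            revision.setClientId(clientId);
            return revision;
        }
    }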

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/SnippetDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/SnippetDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/SnippetDTO.java
index 8e2c215..810b7be 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/SnippetDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/SnippetDTO.java
@@ -88,10 +88,8 @@ public class SnippetDTO {
     }
 
     /**
-     * @return the ids of the connections in this snippet. These ids will be populated
-     * within each response. They can be specified when creating a snippet.
-     * However, once a snippet has been created its contents cannot be modified
-     * (these ids are ignored during update requests)
+     * @return the ids of the connections in this snippet. These ids will be populated within each response. They can be specified when creating a snippet. However, once a snippet has been created its
+     * contents cannot be modified (these ids are ignored during update requests)
      */
     public Set<String> getConnections() {
         return connections;
@@ -102,10 +100,8 @@ public class SnippetDTO {
     }
 
     /**
-     * @return the ids of the funnels in this snippet. These ids will be populated
-     * within each response. They can be specified when creating a snippet.
-     * However, once a snippet has been created its contents cannot be modified
-     * (these ids are ignored during update requests)
+     * @return the ids of the funnels in this snippet. These ids will be populated within each response. They can be specified when creating a snippet. However, once a snippet has been created its
+     * contents cannot be modified (these ids are ignored during update requests)
      */
     public Set<String> getFunnels() {
         return funnels;
@@ -116,10 +112,8 @@ public class SnippetDTO {
     }
 
     /**
-     * @return the ids of the input port in this snippet. These ids will be populated
-     * within each response. They can be specified when creating a snippet.
-     * However, once a snippet has been created its contents cannot be modified
-     * (these ids are ignored during update requests)
+     * @return the ids of the input ports in this snippet. These ids will be populated within each response. They can be specified when creating a snippet. However, once a snippet has been created its
+     * contents cannot be modified (these ids are ignored during update requests)
      */
     public Set<String> getInputPorts() {
         return inputPorts;
@@ -130,10 +124,8 @@ public class SnippetDTO {
     }
 
     /**
-     * @return the ids of the labels in this snippet. These ids will be populated within
-     * each response. They can be specified when creating a snippet. However,
-     * once a snippet has been created its contents cannot be modified (these
-     * ids are ignored during update requests)
+     * @return the ids of the labels in this snippet. These ids will be populated within each response. They can be specified when creating a snippet. However, once a snippet has been created its
+     * contents cannot be modified (these ids are ignored during update requests)
      */
     public Set<String> getLabels() {
         return labels;
@@ -144,10 +136,8 @@ public class SnippetDTO {
     }
 
     /**
-     * @return the ids of the output ports in this snippet. These ids will be populated
-     * within each response. They can be specified when creating a snippet.
-     * However, once a snippet has been created its contents cannot be modified
-     * (these ids are ignored during update requests)
+     * @return the ids of the output ports in this snippet. These ids will be populated within each response. They can be specified when creating a snippet. However, once a snippet has been created
+     * its contents cannot be modified (these ids are ignored during update requests)
      */
     public Set<String> getOutputPorts() {
         return outputPorts;
@@ -158,10 +148,8 @@ public class SnippetDTO {
     }
 
     /**
-     * @return The ids of the process groups in this snippet. These ids will be
-     * populated within each response. They can be specified when creating a
-     * snippet. However, once a snippet has been created its contents cannot be
-     * modified (these ids are ignored during update requests)
+     * @return The ids of the process groups in this snippet. These ids will be populated within each response. They can be specified when creating a snippet. However, once a snippet has been created
+     * its contents cannot be modified (these ids are ignored during update requests)
      */
     public Set<String> getProcessGroups() {
         return processGroups;
@@ -172,10 +160,8 @@ public class SnippetDTO {
     }
 
     /**
-     * @return The ids of the processors in this snippet. These ids will be populated
-     * within each response. They can be specified when creating a snippet.
-     * However, once a snippet has been created its contents cannot be modified
-     * (these ids are ignored during update requests)
+     * @return The ids of the processors in this snippet. These ids will be populated within each response. They can be specified when creating a snippet. However, once a snippet has been created its
+     * contents cannot be modified (these ids are ignored during update requests)
      */
     public Set<String> getProcessors() {
         return processors;
@@ -186,10 +172,8 @@ public class SnippetDTO {
     }
 
     /**
-     * @return the ids of the remote process groups in this snippet. These ids will be
-     * populated within each response. They can be specified when creating a
-     * snippet. However, once a snippet has been created its contents cannot be
-     * modified (these ids are ignored during update requests)
+     * @return the ids of the remote process groups in this snippet. These ids will be populated within each response. They can be specified when creating a snippet. However, once a snippet has been
+     * created its contents cannot be modified (these ids are ignored during update requests)
      */
     public Set<String> getRemoteProcessGroups() {
         return remoteProcessGroups;
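
The practical consequence of these docs is that the component ids only matter when the snippet is created; responses echo them back and update requests ignore them. A small sketch of assembling a snippet for creation, assuming setter names that mirror the getters above:

    import java.util.HashSet;
    import java.util.Set;

    import org.apache.nifi.web.api.dto.SnippetDTO;

    public class SnippetCreation {

        /** Builds a snippet that captures two processors and the connection between them. */
        public static SnippetDTO snippetOf(final String processorId1, final String processorId2, final String connectionId) {
            final SnippetDTO snippet = new SnippetDTO();

            final Set<String> processorIds = new HashSet<>();
            processorIds.add(processorId1);
            processorIds.add(processorId2);
            snippet.setProcessors(processorIds);     // honored at creation, ignored on update

            final Set<String> connectionIds = new HashSet<>();
            connectionIds.add(connectionId);
            snippet.setConnections(connectionIds);
            return snippet;
        }
    }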

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/UserDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/UserDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/UserDTO.java
index c62f99c..2c56422 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/UserDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/UserDTO.java
@@ -86,8 +86,7 @@ public class UserDTO {
     }
 
     /**
-     * @return users name. If the name could not be extracted from the DN, this
-     * value will be the entire DN
+     * @return user's name. If the name could not be extracted from the DN, this value will be the entire DN
      */
     public String getUserName() {
         return userName;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceDTO.java
index 333570e..54a5858 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceDTO.java
@@ -24,8 +24,7 @@ import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
 import org.apache.nifi.web.api.dto.util.TimestampAdapter;
 
 /**
- * A provenance submission. Incorporates the request, its current status, and
- * the results.
+ * A provenance submission. Incorporates the request, its current status, and the results.
  */
 @XmlType(name = "provenance")
 public class ProvenanceDTO {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceEventDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceEventDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceEventDTO.java
index 883fce8..46c1074 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceEventDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/ProvenanceEventDTO.java
@@ -172,8 +172,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return id of the group that this component resides in. If the component is
-     * no longer in the flow, the group id will not be set
+     * @return id of the group that this component resides in. If the component is no longer in the flow, the group id will not be set
      */
     public String getGroupId() {
         return groupId;
@@ -316,8 +315,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return If this represents a route event, this is the relationship to which the
-     * flowfile was routed
+     * @return If this represents a route event, this is the relationship to which the flowfile was routed
      */
     public String getRelationship() {
         return relationship;
@@ -361,8 +359,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return the Section in which the output Content Claim lives, or
-     * <code>null</code> if no Content Claim exists
+     * @return the Section in which the output Content Claim lives, or <code>null</code> if no Content Claim exists
      */
     public String getOutputContentClaimSection() {
         return outputContentClaimSection;
@@ -373,8 +370,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return the Container in which the output Content Claim lives, or
-     * <code>null</code> if no Content Claim exists
+     * @return the Container in which the output Content Claim lives, or <code>null</code> if no Content Claim exists
      */
     public String getOutputContentClaimContainer() {
         return outputContentClaimContainer;
@@ -385,8 +381,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return the Identifier of the output Content Claim, or <code>null</code>
-     * if no Content Claim exists
+     * @return the Identifier of the output Content Claim, or <code>null</code> if no Content Claim exists
      */
     public String getOutputContentClaimIdentifier() {
         return outputContentClaimIdentifier;
@@ -397,8 +392,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return the offset into the the output Content Claim where the FlowFile's
-     * content begins, or <code>null</code> if no Content Claim exists
+     * @return the offset into the output Content Claim where the FlowFile's content begins, or <code>null</code> if no Content Claim exists
      */
     public Long getOutputContentClaimOffset() {
         return outputContentClaimOffset;
@@ -442,8 +436,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return the Section in which the input Content Claim lives, or
-     * <code>null</code> if no Content Claim exists
+     * @return the Section in which the input Content Claim lives, or <code>null</code> if no Content Claim exists
      */
     public String getInputContentClaimSection() {
         return inputContentClaimSection;
@@ -454,8 +447,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return the Container in which the input Content Claim lives, or
-     * <code>null</code> if no Content Claim exists
+     * @return the Container in which the input Content Claim lives, or <code>null</code> if no Content Claim exists
      */
     public String getInputContentClaimContainer() {
         return inputContentClaimContainer;
@@ -466,8 +458,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return the Identifier of the input Content Claim, or <code>null</code>
-     * if no Content Claim exists
+     * @return the Identifier of the input Content Claim, or <code>null</code> if no Content Claim exists
      */
     public String getInputContentClaimIdentifier() {
         return inputContentClaimIdentifier;
@@ -478,8 +469,7 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return the offset into the the input Content Claim where the FlowFile's
-     * content begins, or <code>null</code> if no Content Claim exists
+     * @return the offset into the the input Content Claim where the FlowFile's content begins, or <code>null</code> if no Content Claim exists
      */
     public Long getInputContentClaimOffset() {
         return inputContentClaimOffset;
@@ -534,9 +524,8 @@ public class ProvenanceEventDTO {
     }
 
     /**
-     * @return identifier of the FlowFile Queue / Connection from which the
-     * FlowFile was pulled to generate this event, or <code>null</code> if
-     * either the queue is unknown or the FlowFile was created by this event
+     * @return identifier of the FlowFile Queue / Connection from which the FlowFile was pulled to generate this event, or <code>null</code> if either the queue is unknown or the FlowFile was created
+     * by this event
      */
     public String getSourceConnectionIdentifier() {
         return sourceConnectionIdentifier;
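
The four content-claim fields (container, section, identifier, offset) are only meaningful together, and all of them are null when the event carries no content claim. A short sketch of guarding on that, using the getters documented above:

    import org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO;

    public class ContentClaimSummary {

        /** Formats the location of the output content claim, or reports that none exists. */
        public static String describeOutputClaim(final ProvenanceEventDTO event) {
            if (event.getOutputContentClaimIdentifier() == null) {
                return "no output content claim";   // all claim fields are null when no claim exists
            }
            return event.getOutputContentClaimContainer() + "/"
                    + event.getOutputContentClaimSection() + "/"
                    + event.getOutputContentClaimIdentifier()
                    + " @ offset " + event.getOutputContentClaimOffset();
        }
    }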

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/LineageRequestDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/LineageRequestDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/LineageRequestDTO.java
index afcea3f..2494962 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/LineageRequestDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/LineageRequestDTO.java
@@ -54,11 +54,8 @@ public class LineageRequestDTO {
     }
 
     /**
-     * @return type of lineage request. Either 'PARENTS', 'CHILDREN', or 'FLOWFILE'.
-     * PARENTS will return the lineage for the flowfiles that are parents of the
-     * specified event. CHILDREN will return the lineage of for the flowfiles
-     * that are children of the specified event. FLOWFILE will return the
-     * lineage for the specified flowfile.
+     * @return type of lineage request. Either 'PARENTS', 'CHILDREN', or 'FLOWFILE'. PARENTS will return the lineage for the flowfiles that are parents of the specified event. CHILDREN will return the
+     * lineage for the flowfiles that are children of the specified event. FLOWFILE will return the lineage for the specified flowfile.
      */
     public LineageRequestType getLineageRequestType() {
         return lineageRequestType;
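
The request type selects whose lineage comes back: the parents of an event, the children of an event, or the full lineage of a single flowfile. A small illustrative sketch; the nested enum constant and the setters used here are assumptions based on the getter and the values named above:

    import org.apache.nifi.web.api.dto.provenance.lineage.LineageRequestDTO;
    import org.apache.nifi.web.api.dto.provenance.lineage.LineageRequestDTO.LineageRequestType;

    public class LineageRequests {

        /** Builds a request for the full lineage of a single flowfile. */
        public static LineageRequestDTO forFlowFile(final String flowFileUuid) {
            final LineageRequestDTO request = new LineageRequestDTO();
            request.setLineageRequestType(LineageRequestType.FLOWFILE);   // PARENTS and CHILDREN are the other options
            request.setUuid(flowFileUuid);                                // assumed setter for the flowfile uuid
            return request;
        }
    }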

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/ProvenanceNodeDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/ProvenanceNodeDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/ProvenanceNodeDTO.java
index 4a7ed9c..b517751 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/ProvenanceNodeDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/provenance/lineage/ProvenanceNodeDTO.java
@@ -25,8 +25,7 @@ import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
 import org.apache.nifi.web.api.dto.util.TimestampAdapter;
 
 /**
- * A node within a provenance lineage. May represent either an event or a
- * flowfile.
+ * A node within a provenance lineage. May represent either an event or a flowfile.
  */
 @XmlType(name = "provenanceNode")
 public class ProvenanceNodeDTO {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ControllerStatusDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ControllerStatusDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ControllerStatusDTO.java
index adfee2a..bd2eca6 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ControllerStatusDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ControllerStatusDTO.java
@@ -65,8 +65,7 @@ public class ControllerStatusDTO {
     }
 
     /**
-     * @return Used in clustering, will report the number of nodes connected vs the
-     * number of nodes in the cluster
+     * @return Used in clustering, will report the number of nodes connected vs the number of nodes in the cluster
      */
     public String getConnectedNodes() {
         return connectedNodes;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/PortStatusDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/PortStatusDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/PortStatusDTO.java
index db03146..e4cbd34 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/PortStatusDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/PortStatusDTO.java
@@ -100,8 +100,7 @@ public class PortStatusDTO extends StatusDTO {
     }
 
     /**
-     * @return The total count and size of flow files that have been accepted in the
-     * last five minutes
+     * @return The total count and size of flow files that have been accepted in the last five minutes
      */
     public String getInput() {
         return input;
@@ -112,8 +111,7 @@ public class PortStatusDTO extends StatusDTO {
     }
 
     /**
-     * @return The total count and size of flow files that have been processed in the
-     * last five minutes
+     * @return The total count and size of flow files that have been processed in the last five minutes
      */
     public String getOutput() {
         return output;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessGroupStatusDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessGroupStatusDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessGroupStatusDTO.java
index 7ad24a9..6aa445a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessGroupStatusDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessGroupStatusDTO.java
@@ -177,8 +177,7 @@ public class ProcessGroupStatusDTO extends StatusDTO {
     }
 
     /**
-     * The transferred stats for this process group. This represents the
-     * count/size of flowfiles transferred to/from queues.
+     * The transferred stats for this process group. This represents the count/size of flowfiles transferred to/from queues.
      *
      * @return The transferred status for this process group
      */
@@ -191,8 +190,7 @@ public class ProcessGroupStatusDTO extends StatusDTO {
     }
 
     /**
-     * The received stats for this process group. This represents the count/size
-     * of flowfiles received.
+     * The received stats for this process group. This represents the count/size of flowfiles received.
      *
      * @return The received stats for this process group
      */
@@ -205,8 +203,7 @@ public class ProcessGroupStatusDTO extends StatusDTO {
     }
 
     /**
-     * The sent stats for this process group. This represents the count/size of
-     * flowfiles sent.
+     * The sent stats for this process group. This represents the count/size of flowfiles sent.
      *
      * @return The sent stats for this process group
      */

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessorStatusDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessorStatusDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessorStatusDTO.java
index 18539be..21c3d44 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessorStatusDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/ProcessorStatusDTO.java
@@ -86,8 +86,7 @@ public class ProcessorStatusDTO extends StatusDTO {
     }
 
     /**
-     * @return The total count and size of flow files that have been accepted in the
-     * last five minutes
+     * @return The total count and size of flow files that have been accepted in the last five minutes
      */
     public String getInput() {
         return input;
@@ -131,8 +130,7 @@ public class ProcessorStatusDTO extends StatusDTO {
     }
 
     /**
-     * @return The total count and size of flow files that have been processed in the
-     * last five minutes
+     * @return The total count and size of flow files that have been processed in the last five minutes
      */
     public String getOutput() {
         return output;
@@ -165,8 +163,7 @@ public class ProcessorStatusDTO extends StatusDTO {
     }
 
     /**
-     * @return total duration of all tasks for this connectable over the last 5
-     * minutes
+     * @return total duration of all tasks for this connectable over the last 5 minutes
      */
     public String getTasksDuration() {
         return tasksDuration;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/RemoteProcessGroupStatusDTO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/RemoteProcessGroupStatusDTO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/RemoteProcessGroupStatusDTO.java
index 68aa5d6..5f7c2c1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/RemoteProcessGroupStatusDTO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/status/RemoteProcessGroupStatusDTO.java
@@ -115,8 +115,7 @@ public class RemoteProcessGroupStatusDTO extends StatusDTO {
     }
 
     /**
-     * @return Formatted description of the amount of data sent to this remote process
-     * group
+     * @return Formatted description of the amount of data sent to this remote process group
      */
     public String getSent() {
         return sent;
@@ -127,8 +126,7 @@ public class RemoteProcessGroupStatusDTO extends StatusDTO {
     }
 
     /**
-     * @return Formatted description of the amount of data received from this remote
-     * process group
+     * @return Formatted description of the amount of data received from this remote process group
      */
     public String getReceived() {
         return received;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AboutEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AboutEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AboutEntity.java
index 36fc163..5cdbe1e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AboutEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AboutEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.AboutDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a AboutDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to an AboutDTO.
  */
 @XmlRootElement(name = "aboutEntity")
 public class AboutEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ActionEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ActionEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ActionEntity.java
index 1b2c0b6..d52f5fb 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ActionEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/ActionEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.action.ActionDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to an ActionDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to an ActionDTO.
  */
 @XmlRootElement(name = "actionEntity")
 public class ActionEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AuthorityEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AuthorityEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AuthorityEntity.java
index 103a937..9b023ba 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AuthorityEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/AuthorityEntity.java
@@ -21,9 +21,7 @@ import java.util.Set;
 import javax.xml.bind.annotation.XmlRootElement;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a response to the API. This particular entity holds a set of user
- * authorities.
+ * A serialized representation of this class can be placed in the entity body of a response to the API. This particular entity holds a set of user authorities.
  */
 @XmlRootElement(name = "authoritiesEntity")
 public class AuthorityEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BannerEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BannerEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BannerEntity.java
index 594707a..c9a8cd2 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BannerEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BannerEntity.java
@@ -21,9 +21,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.BannerDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a BannerDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a BannerDTO.
  */
 @XmlRootElement(name = "bannersEntity")
 public class BannerEntity extends Entity {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/a52cf529/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BulletinBoardEntity.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BulletinBoardEntity.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BulletinBoardEntity.java
index 4287893..f648d50 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BulletinBoardEntity.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/entity/BulletinBoardEntity.java
@@ -20,9 +20,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.nifi.web.api.dto.BulletinBoardDTO;
 
 /**
- * A serialized representation of this class can be placed in the entity body of
- * a request or response to or from the API. This particular entity holds a
- * reference to a BulletinBoardDTO.
+ * A serialized representation of this class can be placed in the entity body of a request or response to or from the API. This particular entity holds a reference to a BulletinBoardDTO.
  */
 @XmlRootElement(name = "bulletinBoardEntity")
 public class BulletinBoardEntity extends Entity {


[42/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
NIFI-271


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/e6274827
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/e6274827
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/e6274827

Branch: refs/heads/NIFI-292
Commit: e627482734ab701bd0ccc5b5f389340df8ed90f4
Parents: 525ce7f
Author: joewitt <jo...@apache.org>
Authored: Tue Apr 28 00:35:56 2015 -0400
Committer: joewitt <jo...@apache.org>
Committed: Tue Apr 28 00:35:56 2015 -0400

----------------------------------------------------------------------
 .../org/apache/nifi/processors/GeoEnrichIP.java | 14 +++----
 .../nifi/processors/maxmind/DatabaseReader.java | 40 +++++++-------------
 2 files changed, 20 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e6274827/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java b/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
index 1ecb221..b766878 100644
--- a/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
+++ b/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
@@ -189,17 +189,17 @@ public class GeoEnrichIP extends AbstractProcessor {
         final Map<String, String> attrs = new HashMap<>();
         attrs.put(new StringBuilder(ipAttributeName).append(".geo.lookup.micros").toString(), String.valueOf(stopWatch.getDuration(TimeUnit.MICROSECONDS)));
         attrs.put(new StringBuilder(ipAttributeName).append(".geo.city").toString(), response.getCity().getName());
-        
+
         final Double latitude = response.getLocation().getLatitude();
-        if ( latitude != null ) {
-        	attrs.put(new StringBuilder(ipAttributeName).append(".geo.latitude").toString(), latitude.toString());
+        if (latitude != null) {
+            attrs.put(new StringBuilder(ipAttributeName).append(".geo.latitude").toString(), latitude.toString());
         }
-        
+
         final Double longitude = response.getLocation().getLongitude();
-        if ( longitude != null ) {
-        	attrs.put(new StringBuilder(ipAttributeName).append(".geo.longitude").toString(), longitude.toString());
+        if (longitude != null) {
+            attrs.put(new StringBuilder(ipAttributeName).append(".geo.longitude").toString(), longitude.toString());
         }
-        
+
         int i = 0;
         for (final Subdivision subd : response.getSubdivisions()) {
             attrs.put(new StringBuilder(ipAttributeName).append(".geo.subdivision.").append(i).toString(), subd.getName());
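
After the cleanup the processor still writes the same attribute names; only the formatting of the code changed. For an IP attribute named "ip", the enriched FlowFile would carry attributes along these lines (all values below are purely illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class GeoEnrichIPAttributes {

        /** Illustrates the attribute names GeoEnrichIP adds for an IP attribute named "ip". */
        public static Map<String, String> exampleEnrichedAttributes() {
            final Map<String, String> attrs = new HashMap<>();
            attrs.put("ip.geo.lookup.micros", "1234");       // how long the database lookup took
            attrs.put("ip.geo.city", "Minneapolis");         // city name from the lookup response
            attrs.put("ip.geo.latitude", "44.98");           // only present when the response has a latitude
            attrs.put("ip.geo.longitude", "-93.26");         // only present when the response has a longitude
            attrs.put("ip.geo.subdivision.0", "Minnesota");  // one entry per subdivision, 0-indexed
            return attrs;
        }
    }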

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e6274827/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/maxmind/DatabaseReader.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/maxmind/DatabaseReader.java b/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/maxmind/DatabaseReader.java
index f1d26bf..fb84daf 100644
--- a/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/maxmind/DatabaseReader.java
+++ b/nifi/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/maxmind/DatabaseReader.java
@@ -43,17 +43,13 @@ import com.maxmind.geoip2.model.IspResponse;
 
 /**
  * <p>
- * This class was copied from
- * https://raw.githubusercontent.com/maxmind/GeoIP2-java/master/src/main/java/com/maxmind/geoip2/DatabaseReader.java
- * It is written by Maxmind and it is available under Apache Software License V2
+ * This class was copied from https://raw.githubusercontent.com/maxmind/GeoIP2-java/master/src/main/java/com/maxmind/geoip2/DatabaseReader.java. It is written by Maxmind and is available under the
+ * Apache Software License V2.
  *
- * The modification we're making to the code below is to stop using exceptions
- * for mainline flow control. Specifically we don't want to throw an exception
- * simply because an address was not found.
+ * The modification we're making to the code below is to stop using exceptions for mainline flow control. Specifically we don't want to throw an exception simply because an address was not found.
  * </p>
  *
- * Instances of this class provide a reader for the GeoIP2 database format. IP
- * addresses can be looked up using the <code>get</code> method.
+ * Instances of this class provide a reader for the GeoIP2 database format. IP addresses can be looked up using the <code>get</code> method.
  */
 public class DatabaseReader implements GeoIp2Provider, Closeable {
 
@@ -84,12 +80,10 @@ public class DatabaseReader implements GeoIp2Provider, Closeable {
 
     /**
      * <p>
-     * Constructs a Builder for the DatabaseReader. The file passed to it must
-     * be a valid GeoIP2 database file.
+     * Constructs a Builder for the DatabaseReader. The file passed to it must be a valid GeoIP2 database file.
      * </p>
      * <p>
-     * <code>Builder</code> creates instances of <code>DatabaseReader</code>
-     * from values set by the methods.
+     * <code>Builder</code> creates instances of <code>DatabaseReader</code> from values set by the methods.
      * </p>
      * <p>
      * Only the values set in the <code>Builder</code> constructor are required.
@@ -120,8 +114,7 @@ public class DatabaseReader implements GeoIp2Provider, Closeable {
         }
 
         /**
-         * @param val List of locale codes to use in name property from most
-         * preferred to least preferred.
+         * @param val List of locale codes to use in name property from most preferred to least preferred.
          * @return Builder object
          */
         public Builder locales(List<String> val) {
@@ -132,9 +125,7 @@ public class DatabaseReader implements GeoIp2Provider, Closeable {
         /**
          * @param val The file mode used to open the GeoIP2 database
          * @return Builder object
-         * @throws java.lang.IllegalArgumentException if you initialized the
-         * Builder with a URL, which uses {@link FileMode#MEMORY}, but you
-         * provided a different FileMode to this method.
+         * @throws java.lang.IllegalArgumentException if you initialized the Builder with a URL, which uses {@link FileMode#MEMORY}, but you provided a different FileMode to this method.
          */
         public Builder fileMode(FileMode val) {
             if (this.stream != null && !FileMode.MEMORY.equals(val)) {
@@ -146,8 +137,7 @@ public class DatabaseReader implements GeoIp2Provider, Closeable {
         }
 
         /**
-         * @return an instance of <code>DatabaseReader</code> created from the
-         * fields set on this builder.
+         * @return an instance of <code>DatabaseReader</code> created from the fields set on this builder.
          * @throws IOException if there is an error reading the database
          */
         public DatabaseReader build() throws IOException {
@@ -157,10 +147,8 @@ public class DatabaseReader implements GeoIp2Provider, Closeable {
 
     /**
      * @param ipAddress IPv4 or IPv6 address to lookup.
-     * @return An object of type T with the data for the IP address or null if no
-     * information could be found for the given IP address
-     * @throws IOException if there is an error opening or reading from the
-     * file.
+     * @return An object of type T with the data for the IP address or null if no information could be found for the given IP address
+     * @throws IOException if there is an error opening or reading from the file.
      */
     private <T> T get(InetAddress ipAddress, Class<T> cls, boolean hasTraits,
             String type) throws IOException, AddressNotFoundException {
@@ -200,10 +188,8 @@ public class DatabaseReader implements GeoIp2Provider, Closeable {
      * </p>
      * <p>
      * If you are using <code>FileMode.MEMORY_MAPPED</code>, this will
-     * <em>not</em> unmap the underlying file due to a limitation in Java's
-     * <code>MappedByteBuffer</code>. It will however set the reference to the
-     * buffer to <code>null</code>, allowing the garbage collector to collect
-     * it.
+     * <em>not</em> unmap the underlying file due to a limitation in Java's <code>MappedByteBuffer</code>. It will however set the reference to the buffer to <code>null</code>, allowing the garbage
+     * collector to collect it.
      * </p>
      *
      * @throws IOException if an I/O error occurs.
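
For reference, the builder documented above is used the same way as the upstream MaxMind reader: construct it from a database file, optionally set locales and file mode, then build. A hedged usage sketch; the city() lookup and the CityResponse type are assumed from the upstream GeoIP2-java API, and the database path is illustrative. With the NiFi modification described above, a miss yields null rather than an AddressNotFoundException:

    import java.io.File;
    import java.net.InetAddress;
    import java.util.Arrays;

    import com.maxmind.geoip2.model.CityResponse;

    import org.apache.nifi.processors.maxmind.DatabaseReader;

    public class GeoLookupExample {

        public static void main(final String[] args) throws Exception {
            // Build a reader over a GeoIP2 city database file (path is illustrative).
            final DatabaseReader reader = new DatabaseReader.Builder(new File("GeoLite2-City.mmdb"))
                    .locales(Arrays.asList("en"))
                    .build();
            try {
                final CityResponse response = reader.city(InetAddress.getByName("93.184.216.34"));
                if (response == null) {
                    System.out.println("address not found");           // no exception on a miss
                } else {
                    System.out.println(response.getCity().getName());  // same accessor GeoEnrichIP uses
                }
            } finally {
                reader.close();
            }
        }
    }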


[23/50] [abbrv] incubator-nifi git commit: Merge branch 'develop' of http://git-wip-us.apache.org/repos/asf/incubator-nifi into develop

Posted by mc...@apache.org.
Merge branch 'develop' of http://git-wip-us.apache.org/repos/asf/incubator-nifi into develop


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/384b2ac2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/384b2ac2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/384b2ac2

Branch: refs/heads/NIFI-292
Commit: 384b2ac2535987a42ae36568d285c829461a1587
Parents: 3cd18b0 1086094
Author: Mark Payne <ma...@hotmail.com>
Authored: Mon Apr 27 14:14:48 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Mon Apr 27 14:14:48 2015 -0400

----------------------------------------------------------------------
 nifi-parent/pom.xml                             |   6 +-
 .../nifi-hadoop-bundle/nifi-hadoop-nar/pom.xml  |  28 +--
 .../hadoop/AbstractHadoopProcessor.java         |   7 +-
 .../hadoop/CreateHadoopSequenceFile.java        |  28 +--
 .../apache/nifi/processors/hadoop/GetHDFS.java  |  60 +++----
 .../processors/hadoop/GetHDFSSequenceFile.java  |  18 +-
 .../nifi/processors/hadoop/KeyValueReader.java  |   6 +-
 .../nifi/processors/hadoop/ValueReader.java     |   5 +-
 .../hadoop/util/ByteFilteringOutputStream.java  |  24 +--
 .../hadoop/util/InputStreamWritable.java        |   6 +-
 .../hadoop/util/OutputStreamWritable.java       |   3 +-
 .../hadoop/util/SequenceFileWriter.java         |  12 +-
 .../nifi/processors/standard/BinFiles.java      |  15 +-
 .../processors/standard/CompressContent.java    |   9 +-
 .../nifi/processors/standard/ControlRate.java   |  11 +-
 .../standard/ConvertCharacterSet.java           |  22 ++-
 .../processors/standard/DistributeLoad.java     |  17 +-
 .../processors/standard/EvaluateJsonPath.java   |  36 ++--
 .../nifi/processors/standard/EvaluateXPath.java |   9 +-
 .../processors/standard/EvaluateXQuery.java     |   6 +-
 .../processors/standard/ExecuteProcess.java     |   9 +-
 .../standard/ExecuteStreamCommand.java          |  21 +--
 .../nifi/processors/standard/ExtractText.java   |   3 +-
 .../processors/standard/GenerateFlowFile.java   |   3 +-
 .../nifi/processors/standard/GetFile.java       |   6 +-
 .../nifi/processors/standard/GetJMSTopic.java   |   3 +-
 .../processors/standard/HandleHttpRequest.java  |  17 +-
 .../processors/standard/HandleHttpResponse.java |   3 +-
 .../nifi/processors/standard/HashAttribute.java |  10 +-
 .../nifi/processors/standard/InvokeHTTP.java    |  24 ++-
 .../nifi/processors/standard/JmsConsumer.java   |   6 +-
 .../nifi/processors/standard/ListenUDP.java     | 178 +++++++++----------
 .../nifi/processors/standard/MergeContent.java  |   6 +-
 .../nifi/processors/standard/PostHTTP.java      |  47 +++--
 .../nifi/processors/standard/PutEmail.java      |   6 +-
 .../apache/nifi/processors/standard/PutFTP.java |   9 +-
 .../processors/standard/PutFileTransfer.java    |  11 +-
 .../apache/nifi/processors/standard/PutJMS.java |   3 +-
 .../nifi/processors/standard/PutSFTP.java       |   6 +-
 .../nifi/processors/standard/ReplaceText.java   |  28 ++-
 .../standard/ReplaceTextWithMapping.java        |  13 +-
 .../processors/standard/RouteOnAttribute.java   |   6 +-
 .../nifi/processors/standard/ScanAttribute.java |   5 +-
 .../nifi/processors/standard/SplitContent.java  |   9 +-
 .../nifi/processors/standard/SplitText.java     |  18 +-
 .../nifi/processors/standard/SplitXml.java      |   3 +-
 .../nifi/processors/standard/TransformXml.java  |  52 +++---
 .../nifi/processors/standard/UnpackContent.java |  39 ++--
 .../nifi/processors/standard/ValidateXml.java   |  51 +++---
 .../servlets/ContentAcknowledgmentServlet.java  |   5 -
 .../standard/servlets/ListenHTTPServlet.java    |   5 -
 .../nifi/processors/standard/util/Bin.java      |  22 +--
 .../processors/standard/util/BinManager.java    |   2 +-
 .../standard/util/DocumentReaderCallback.java   |  10 +-
 .../processors/standard/util/FTPTransfer.java   |   3 +-
 .../nifi/processors/standard/util/FTPUtils.java |   2 +-
 .../processors/standard/util/FileTransfer.java  |  29 ++-
 .../processors/standard/util/SFTPTransfer.java  |   9 +-
 .../standard/util/XmlSplitterSaxParser.java     |  11 +-
 .../processors/standard/TestDistributeLoad.java |   3 +-
 .../standard/TestHandleHttpRequest.java         |   4 +-
 .../distributed/cache/client/CommsSession.java  |  16 +-
 .../DistributedMapCacheClientService.java       |   7 +-
 .../DistributedSetCacheClientService.java       |   6 +-
 .../cache/client/SSLCommsSession.java           |  25 +--
 .../cache/client/StandardCommsSession.java      |   1 +
 .../additionalDetails.html                      |  60 +++----
 .../cache/server/AbstractCacheServer.java       |  25 +--
 .../distributed/cache/server/CacheRecord.java   |  12 +-
 .../distributed/cache/server/CacheServer.java   |   3 +-
 .../cache/server/DistributedCacheServer.java    |   3 +-
 .../cache/server/DistributedSetCacheServer.java |  13 +-
 .../cache/server/EvictionPolicy.java            |  24 +--
 .../cache/server/SetCacheServer.java            |  25 +--
 .../server/map/DistributedMapCacheServer.java   |  12 +-
 .../distributed/cache/server/map/MapCache.java  |   4 +
 .../cache/server/map/MapCacheRecord.java        |  19 +-
 .../cache/server/map/MapCacheServer.java        | 113 ++++++------
 .../cache/server/map/MapPutResult.java          |   5 +-
 .../cache/server/map/PersistentMapCache.java    |  51 +++---
 .../cache/server/map/SimpleMapCache.java        |  47 ++---
 .../cache/server/set/PersistentSetCache.java    |  57 +++---
 .../distributed/cache/server/set/SetCache.java  |   5 +-
 .../cache/server/set/SetCacheRecord.java        |  15 +-
 .../cache/server/set/SetCacheResult.java        |  11 +-
 .../cache/server/set/SimpleSetCache.java        |  41 ++---
 .../additionalDetails.html                      |  62 +++----
 .../cache/server/TestServerAndClient.java       |   9 +-
 .../nifi-http-context-map-api/pom.xml           |  34 ++--
 .../org/apache/nifi/http/HttpContextMap.java    |  45 +++--
 .../nifi-http-context-map/pom.xml               |  20 +--
 .../nifi/http/StandardHttpContextMap.java       |  83 ++++-----
 .../index.html                                  |  36 ++--
 .../nifi/ssl/StandardSSLContextService.java     |   3 +-
 .../apache/nifi/ssl/SSLContextServiceTest.java  |   4 +-
 95 files changed, 916 insertions(+), 1008 deletions(-)
----------------------------------------------------------------------



[37/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java
index 1bf3f77..2b3657e 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessGroupResource.java
@@ -84,7 +84,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Get the processor resource within the specified group.
      *
-     * @return
+     * @return the processor resource within the specified group
      */
     @Path("processors")
     public ProcessorResource getProcessorResource() {
@@ -96,7 +96,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Get the connection sub-resource within the specified group.
      *
-     * @return
+     * @return the connection sub-resource within the specified group
      */
     @Path("connections")
     public ConnectionResource getConnectionResource() {
@@ -108,7 +108,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Get the input ports sub-resource within the specified group.
      *
-     * @return
+     * @return the input ports sub-resource within the specified group
      */
     @Path("input-ports")
     public InputPortResource getInputPortResource() {
@@ -120,7 +120,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Get the output ports sub-resource within the specified group.
      *
-     * @return
+     * @return the output ports sub-resource within the specified group
      */
     @Path("output-ports")
     public OutputPortResource getOutputPortResource() {
@@ -132,7 +132,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Locates the label sub-resource within the specified group.
      *
-     * @return
+     * @return the label sub-resource within the specified group
      */
     @Path("labels")
     public LabelResource getLabelResource() {
@@ -144,7 +144,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Locates the funnel sub-resource within the specified group.
      *
-     * @return
+     * @return the funnel sub-resource within the specified group
      */
     @Path("funnels")
     public FunnelResource getFunnelResource() {
@@ -156,7 +156,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Locates the remote process group sub-resource within the specified group.
      *
-     * @return
+     * @return the remote process group sub-resource within the specified group
      */
     @Path("remote-process-groups")
     public RemoteProcessGroupResource getRemoteProcessGroupResource() {
@@ -168,8 +168,8 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Populates the remaining fields in the specified process groups.
      *
-     * @param processGroups
-     * @return
+     * @param processGroups groups
+     * @return group dto
      */
     public Set<ProcessGroupDTO> populateRemainingProcessGroupsContent(Set<ProcessGroupDTO> processGroups) {
         for (ProcessGroupDTO processGroup : processGroups) {
@@ -181,9 +181,9 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Populates the remaining fields in the specified process group.
      *
-     * @param processGroup
-     * @param verbose
-     * @return
+     * @param processGroup group
+     * @param processGroupUri processGroupUri
+     * @return group dto
      */
     private ProcessGroupDTO populateRemainingProcessGroupContent(ProcessGroupDTO processGroup, String processGroupUri) {
         FlowSnippetDTO flowSnippet = processGroup.getContents();
@@ -201,9 +201,6 @@ public class ProcessGroupResource extends ApplicationResource {
 
     /**
      * Populates the remaining content of the specified snippet.
-     *
-     * @param snippet
-     * @return
      */
     private FlowSnippetDTO populateRemainingSnippetContent(FlowSnippetDTO snippet) {
         getProcessorResource().populateRemainingProcessorsContent(snippet.getProcessors());
@@ -224,9 +221,6 @@ public class ProcessGroupResource extends ApplicationResource {
 
     /**
      * Generates a URI for a process group.
-     *
-     * @param processGroupId
-     * @return
      */
     private String getProcessGroupUri(String processGroupId) {
         return generateResourceUri("controller", "process-groups", processGroupId);
@@ -234,28 +228,17 @@ public class ProcessGroupResource extends ApplicationResource {
 
     /**
      * Generates a URI for a process group reference.
-     *
-     * @param processGroupId
-     * @return
      */
     private String getProcessGroupReferenceUri(ProcessGroupDTO processGroup) {
         return generateResourceUri("controller", "process-groups", processGroup.getParentGroupId(), "process-group-references", processGroup.getId());
     }
 
     /**
-     * Retrieves the content of the specified group. This includes all
-     * processors, the connections, the process group references, the remote
-     * process group references, and the labels.
+     * Retrieves the content of the specified group. This includes all processors, the connections, the process group references, the remote process group references, and the labels.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param recursive Optional recursive flag that defaults to false. If set
-     * to true, all descendent groups and their content will be included if the
-     * verbose flag is also set to true.
-     * @param verbose Optional verbose flag that defaults to false. If the
-     * verbose flag is set to true processor configuration and property details
-     * will be included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param recursive Optional recursive flag that defaults to false. If set to true, all descendent groups and their content will be included if the verbose flag is also set to true.
+     * @param verbose Optional verbose flag that defaults to false. If the verbose flag is set to true processor configuration and property details will be included in the response.
      * @return A processGroupEntity.
      */
     @GET
@@ -300,12 +283,9 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Copies the specified snippet within this ProcessGroup.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param snippetId The id of the snippet to copy.
      * @param originX The x coordinate of the origin of the bounding box.
      * @param originY The y coordinate of the origin of the bounding box.
@@ -379,12 +359,9 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Instantiates the specified template within this ProcessGroup.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param templateId The id of the template to instantiate.
      * @param originX The x coordinate of the origin of the bounding box.
      * @param originY The y coordinate of the origin of the bounding box.
@@ -453,18 +430,12 @@ public class ProcessGroupResource extends ApplicationResource {
     }
 
     /**
-     * Updates the state of all processors in the process group. Supports
-     * modifying whether the processors and process groups are running/stopped
-     * and instantiating templates.
+     * Updates the state of all processors in the process group. Supports modifying whether the processors and process groups are running/stopped and instantiating templates.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param running Optional flag that indicates whether all processors in
-     * this group should be started/stopped.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param running Optional flag that indicates whether all processors in this group should be started/stopped.
      * @return A processGroupEntity.
      */
     @PUT
@@ -501,11 +472,9 @@ public class ProcessGroupResource extends ApplicationResource {
     }
 
     /**
-     * Updates the state of all processors in the process group. Supports
-     * modifying whether the processors and process groups are running/stopped
-     * and instantiating templates.
+     * Updates the state of all processors in the process group. Supports modifying whether the processors and process groups are running/stopped and instantiating templates.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param processGroupEntity A processGroupEntity
      * @return A processGroupEntity
      */
@@ -573,16 +542,10 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Retrieves the contents of the specified group.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param recursive Optional recursive flag that defaults to false. If set
-     * to true, all descendent groups and their content will be included if the
-     * verbose flag is also set to true.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param recursive Optional recursive flag that defaults to false. If set to true, all descendent groups and their content will be included if the verbose flag is also set to true.
      * @param processGroupReferenceId The id of the process group.
-     * @param verbose Optional verbose flag that defaults to false. If the
-     * verbose flag is set to true processor configuration and property details
-     * will be included in the response.
+     * @param verbose Optional verbose flag that defaults to false. If the verbose flag is set to true processor configuration and property details will be included in the response.
      * @return A processGroupEntity.
      */
     @GET
@@ -629,12 +592,8 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Retrieves the content of the specified group reference.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param verbose Optional verbose flag that defaults to false. If the
-     * verbose flag is set to true processor configuration and property details
-     * will be included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param verbose Optional verbose flag that defaults to false. If the verbose flag is set to true processor configuration and property details will be included in the response.
      * @return A controllerEntity.
      */
     @GET
@@ -676,12 +635,9 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Adds the specified process group.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param name The name of the process group
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
@@ -729,7 +685,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Adds the specified process group.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param processGroupEntity A processGroupEntity
      * @return A processGroupEntity
      */
@@ -810,17 +766,13 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Updates the specified process group.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the process group
      * @param name The name of the process group.
      * @param comments The comments for the process group.
-     * @param running Optional flag that indicates whether all processors should
-     * be started/stopped.
+     * @param running Optional flag that indicates whether all processors should be started/stopped.
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
      * @return A processGroupEntity.
@@ -874,7 +826,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Updates the specified process group.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the process group.
      * @param processGroupEntity A processGroupEntity.
      * @return A processGroupEntity.
@@ -945,12 +897,9 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Removes the specified process group reference.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the process group to be removed.
      * @return A processGroupEntity.
      */
@@ -1002,12 +951,8 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Retrieves the status report for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param recursive Optional recursive flag that defaults to false. If set
-     * to true, all descendent groups and their content will be included if the
-     * verbose flag is also set to true.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param recursive Optional recursive flag that defaults to false. If set to true, all descendent groups and their content will be included if the verbose flag is also set to true.
      * @return A processGroupStatusEntity.
      */
     @GET
@@ -1050,9 +995,7 @@ public class ProcessGroupResource extends ApplicationResource {
     /**
      * Retrieves the specified remote process groups status history.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A processorEntity.
      */
     @GET
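
A minimal client-side sketch of calling the group-contents endpoint whose Javadoc was reflowed above. The query parameter names (recursive, verbose) come from that Javadoc; the base URL and group id are assumptions for illustration.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class GetProcessGroupContents {
    public static void main(String[] args) throws Exception {
        // Base URL and group id are placeholders; recursive/verbose are the
        // optional flags described in the Javadoc above.
        String url = "http://localhost:8080/nifi-api/controller/process-groups/root"
                + "?recursive=false&verbose=true";

        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Accept", "application/json");

        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            // The response body is a processGroupEntity, per the @return tag.
            in.lines().forEach(System.out::println);
        } finally {
            conn.disconnect();
        }
    }
}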

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java
index 00b6fe3..16c7e19 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProcessorResource.java
@@ -100,8 +100,8 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Populate the uri's for the specified processors and their relationships.
      *
-     * @param processors
-     * @return
+     * @param processors processors
+     * @return dtos
      */
     public Set<ProcessorDTO> populateRemainingProcessorsContent(Set<ProcessorDTO> processors) {
         for (ProcessorDTO processor : processors) {
@@ -112,9 +112,6 @@ public class ProcessorResource extends ApplicationResource {
 
     /**
      * Populate the uri's for the specified processor and its relationships.
-     *
-     * @param processor
-     * @return
      */
     private ProcessorDTO populateRemainingProcessorContent(ProcessorDTO processor) {
         // populate the remaining properties
@@ -147,9 +144,7 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Retrieves all the processors in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A processorsEntity.
      */
     @GET
@@ -182,15 +177,11 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Creates a new processor.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param name The name of the new processor.
-     * @param type The type of the new processor. This type should refer to one
-     * of the types in the GET /controller/processor-types response.
+     * @param type The type of the new processor. This type should refer to one of the types in the GET /controller/processor-types response.
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
      * @return A processorEntity.
@@ -238,7 +229,7 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Creates a new processor.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param processorEntity A processorEntity.
      * @return A processorEntity.
      */
@@ -324,9 +315,7 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Retrieves the specified processor.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor to retrieve.
      * @return A processorEntity.
      */
@@ -361,9 +350,7 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Retrieves the specified processor status history.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor history to retrieve.
      * @return A statusHistoryEntity.
      */
@@ -398,9 +385,7 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Returns the descriptor for the specified property.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor
      * @param propertyName The property
      * @return a propertyDescriptorEntity
@@ -443,18 +428,14 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Updates the specified processor with the specified values.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor to update.
      * @param x The x coordinate for this processors position.
      * @param y The y coordinate for this processors position.
      * @param name The name of the processor.
-     * @param concurrentlySchedulableTaskCount The number of
-     * concurrentlySchedulableTasks
+     * @param concurrentlySchedulableTaskCount The number of concurrentlySchedulableTasks
      * @param schedulingPeriod The scheduling period
      * @param schedulingStrategy The scheduling strategy
      * @param penaltyDuration The penalty duration
@@ -462,13 +443,10 @@ public class ProcessorResource extends ApplicationResource {
      * @param runDurationMillis The run duration in milliseconds
      * @param bulletinLevel The bulletin level
      * @param comments Any comments about this processor.
-     * @param markedForDeletion Array of property names whose value should be
-     * removed.
+     * @param markedForDeletion Array of property names whose value should be removed.
      * @param state The processors state.
-     * @param formParams Additionally, the processor properties and styles are
-     * specified in the form parameters. Because the property names and styles
-     * differ from processor to processor they are specified in a map-like
-     * fashion:
+     * @param formParams Additionally, the processor properties and styles are specified in the form parameters. Because the property names and styles differ from processor to processor they are
+     * specified in a map-like fashion:
      * <br>
      * <ul>
      * <li>properties[required.file.path]=/path/to/file</li>
@@ -604,7 +582,7 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Updates the specified processor with the specified values.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the processor to update.
      * @param processorEntity A processorEntity.
      * @return A processorEntity.
@@ -686,12 +664,9 @@ public class ProcessorResource extends ApplicationResource {
     /**
      * Removes the specified processor.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the processor to remove.
      * @return A processorEntity.
      */
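
The update-processor Javadoc above describes passing processor properties as map-like form parameters (for example, properties[required.file.path]=/path/to/file). Below is a minimal sketch of building such a request; the URL, revision value, and client id are placeholders, not values taken from this commit.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class UpdateProcessorProperties {
    public static void main(String[] args) throws Exception {
        // Placeholder endpoint; the form-parameter convention comes from the Javadoc above.
        String url = "http://localhost:8080/nifi-api/controller/process-groups/root/processors/1234";

        String body = "version=3"
                + "&clientId=my-client"
                + "&" + URLEncoder.encode("properties[required.file.path]", "UTF-8")
                + "=" + URLEncoder.encode("/path/to/file", "UTF-8");

        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("PUT");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("HTTP " + conn.getResponseCode());
    }
}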

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java
index 5fef27f..4bfe3a0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ProvenanceResource.java
@@ -113,9 +113,7 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Gets the provenance search options for this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A provenanceOptionsEntity
      */
     @GET
@@ -146,15 +144,11 @@ public class ProvenanceResource extends ApplicationResource {
     }
 
     /**
-     * Creates a new replay request for the content associated with the
-     * specified provenance event id.
+     * Creates a new replay request for the content associated with the specified provenance event id.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param clusterNodeId The id of the node in the cluster that has the
-     * specified event. Required if clustered.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clusterNodeId The id of the node in the cluster that has the specified event. Required if clustered.
      * @param eventId The provenance event id.
      * @return A provenanceEventEntity
      */
@@ -220,11 +214,8 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Gets the content for the input of the specified event.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param clusterNodeId The id of the node within the cluster this content
-     * is on. Required if clustered.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clusterNodeId The id of the node within the cluster this content is on. Required if clustered.
      * @param id The id of the provenance event associated with this content.
      * @return The content stream
      */
@@ -294,11 +285,8 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Gets the content for the output of the specified event.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param clusterNodeId The id of the node within the cluster this content
-     * is on. Required if clustered.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clusterNodeId The id of the node within the cluster this content is on. Required if clustered.
      * @param id The id of the provenance event associated with this content.
      * @return The content stream
      */
@@ -368,21 +356,15 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Creates provenance using the specified query criteria.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param startDate The start date.
      * @param endDate The end date.
      * @param minimumFileSize The minimum size of the content after the event.
      * @param maximumFileSize The maximum size of the content after the event.
      * @param maxResults The maximum number of results to return.
-     * @param clusterNodeId The id of node in the cluster to search. This is
-     * optional and only relevant when clustered. If clustered and it is not
-     * specified the entire cluster is searched.
-     * @param formParams Additionally, the search parameters are specified in
-     * the form parameters. Because the search parameters differ based on
-     * configuration they are specified in a map-like fashion:
+     * @param clusterNodeId The id of node in the cluster to search. This is optional and only relevant when clustered. If clustered and it is not specified the entire cluster is searched.
+     * @param formParams Additionally, the search parameters are specified in the form parameters. Because the search parameters differ based on configuration they are specified in a map-like fashion:
      * <br>
      * <ul>
      * <li>search[filename]=myFile.txt</li>
@@ -467,7 +449,7 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Creates provenance using the specified query criteria.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param provenanceEntity A provenanceEntity
      * @return A provenanceEntity
      */
@@ -560,13 +542,9 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Gets the provenance with the specified id.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the provenance
-     * @param clusterNodeId The id of node in the cluster to search. This is
-     * optional and only relevant when clustered. If clustered and it is not
-     * specified the entire cluster is searched.
+     * @param clusterNodeId The id of node in the cluster to search. This is optional and only relevant when clustered. If clustered and it is not specified the entire cluster is searched.
      * @return A provenanceEntity
      */
     @GET
@@ -621,14 +599,10 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Deletes the provenance with the specified id.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the provenance
-     * @param clusterNodeId The id of node in the cluster to search. This is
-     * optional and only relevant when clustered. If clustered and it is not
-     * specified the entire cluster is searched.
+     * @param clusterNodeId The id of node in the cluster to search. This is optional and only relevant when clustered. If clustered and it is not specified the entire cluster is searched.
      * @return A provenanceEntity
      */
     @DELETE
@@ -687,12 +661,9 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Gets the details for a provenance event.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the event
-     * @param clusterNodeId The id of node in the cluster that the
-     * event/flowfile originated from. This is only required when clustered.
+     * @param clusterNodeId The id of node in the cluster that the event/flowfile originated from. This is only required when clustered.
      * @return A provenanceEventEntity
      */
     @GET
@@ -752,29 +723,18 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Submits a lineage request based on an event or a flowfile uuid.
      *
-     * When querying for the lineage of an event you must specify the eventId
-     * and the eventDirection. The eventDirection must be 'parents' or
-     * 'children' and specifies whether we are going up or down the flowfile
-     * ancestry. The uuid cannot be specified in these cases.
+     * When querying for the lineage of an event you must specify the eventId and the eventDirection. The eventDirection must be 'parents' or 'children' and specifies whether we are going up or down
+     * the flowfile ancestry. The uuid cannot be specified in these cases.
      *
-     * When querying for the lineage of a flowfile you must specify the uuid.
-     * The eventId and eventDirection cannot be specified in this case.
+     * When querying for the lineage of a flowfile you must specify the uuid. The eventId and eventDirection cannot be specified in this case.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param eventId The id of an event to get the lineage for. Must also
-     * specify the eventDirection and not the uuid.
-     * @param lineageRequest Either 'PARENTS', 'CHILDREN', or 'FLOWFILE'.
-     * PARENTS will return the lineage for the flowfiles that are parents of the
-     * specified event. CHILDREN will return the lineage of for the flowfiles
-     * that are children of the specified event. FLOWFILE will return the
-     * lineage for the specified flowfile.
-     * @param uuid The uuid of the flowfile to get the lineage for. Must not
-     * specify the eventId or eventDirection.
-     * @param clusterNodeId The id of node in the cluster that the
-     * event/flowfile originated from. This is only required when clustered.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param eventId The id of an event to get the lineage for. Must also specify the eventDirection and not the uuid.
+     * @param lineageRequest Either 'PARENTS', 'CHILDREN', or 'FLOWFILE'. PARENTS will return the lineage for the flowfiles that are parents of the specified event. CHILDREN will return the lineage
+     * for the flowfiles that are children of the specified event. FLOWFILE will return the lineage for the specified flowfile.
+     * @param uuid The uuid of the flowfile to get the lineage for. Must not specify the eventId or eventDirection.
+     * @param clusterNodeId The id of node in the cluster that the event/flowfile originated from. This is only required when clustered.
      * @return A lineageEntity
      */
     @POST
@@ -829,15 +789,12 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Submits a lineage request based on an event or a flowfile uuid.
      *
-     * When querying for the lineage of an event you must specify the eventId
-     * and the eventDirection. The eventDirection must be 'parents' or
-     * 'children' and specifies whether we are going up or down the flowfile
-     * ancestry. The uuid cannot be specified in these cases.
+     * When querying for the lineage of an event you must specify the eventId and the eventDirection. The eventDirection must be 'parents' or 'children' and specifies whether we are going up or down
+     * the flowfile ancestry. The uuid cannot be specified in these cases.
      *
-     * When querying for the lineage of a flowfile you must specify the uuid.
-     * The eventId and eventDirection cannot be specified in this case.
+     * When querying for the lineage of a flowfile you must specify the uuid. The eventId and eventDirection cannot be specified in this case.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param lineageEntity A lineageEntity
      * @return A lineageEntity
      */
@@ -935,11 +892,8 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Gets the lineage with the specified id.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param clusterNodeId The id of node in the cluster that the
-     * event/flowfile originated from. This is only required when clustered.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clusterNodeId The id of node in the cluster that the event/flowfile originated from. This is only required when clustered.
      * @param id The id of the lineage
      * @return A lineageEntity
      */
@@ -993,12 +947,9 @@ public class ProvenanceResource extends ApplicationResource {
     /**
      * Deletes the lineage with the specified id.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param clusterNodeId The id of node in the cluster that the
-     * event/flowfile originated from. This is only required when clustered.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param clusterNodeId The id of node in the cluster that the event/flowfile originated from. This is only required when clustered.
      * @param id The id of the lineage
      * @return A lineageEntity
      */
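
The lineage Javadoc above spells out a mutual-exclusion rule between event-based and flowfile-based queries. A small client-side sketch of that rule follows; the class, method, and enum names are illustrative only.

public class LineageRequestValidator {

    enum Direction { PARENTS, CHILDREN, FLOWFILE }

    // Event lineage queries use eventId plus PARENTS or CHILDREN; flowfile lineage
    // queries use uuid with FLOWFILE. The two styles must not be mixed.
    static void validate(Long eventId, Direction direction, String uuid) {
        boolean eventQuery = eventId != null;
        boolean flowFileQuery = uuid != null;

        if (eventQuery == flowFileQuery) {
            throw new IllegalArgumentException(
                "Specify either an eventId (with PARENTS or CHILDREN) or a flowfile uuid, not both.");
        }
        if (eventQuery && direction == Direction.FLOWFILE) {
            throw new IllegalArgumentException(
                "Event lineage queries must use PARENTS or CHILDREN.");
        }
        if (flowFileQuery && direction != Direction.FLOWFILE) {
            throw new IllegalArgumentException(
                "Flowfile lineage queries must not specify an event direction.");
        }
    }

    public static void main(String[] args) {
        validate(42L, Direction.PARENTS, null);          // OK: event lineage
        validate(null, Direction.FLOWFILE, "uuid-1234"); // OK: flowfile lineage
    }
}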

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java
index b171835..c506b9b 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/RemoteProcessGroupResource.java
@@ -81,11 +81,10 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     private String groupId;
 
     /**
-     * Populates the remaining content for each remote process group. The uri
-     * must be generated and the remote process groups name must be retrieved.
+     * Populates the remaining content for each remote process group. The uri must be generated and the remote process groups name must be retrieved.
      *
-     * @param remoteProcessGroups
-     * @return
+     * @param remoteProcessGroups groups
+     * @return dtos
      */
     public Set<RemoteProcessGroupDTO> populateRemainingRemoteProcessGroupsContent(Set<RemoteProcessGroupDTO> remoteProcessGroups) {
         for (RemoteProcessGroupDTO remoteProcessGroup : remoteProcessGroups) {
@@ -95,12 +94,10 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     }
 
     /**
-     * Populates the remaining content for the specified remote process group.
-     * The uri must be generated and the remote process groups name must be
-     * retrieved.
+     * Populates the remaining content for the specified remote process group. The uri must be generated and the remote process groups name must be retrieved.
      *
-     * @param remoteProcessGroup
-     * @return
+     * @param remoteProcessGroup group
+     * @return dto
      */
     private RemoteProcessGroupDTO populateRemainingRemoteProcessGroupContent(RemoteProcessGroupDTO remoteProcessGroup) {
         // populate the remaining content
@@ -112,12 +109,8 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
     * Retrieves all of the remote process groups in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param verbose Optional verbose flag that defaults to false. If the
-     * verbose flag is set to true remote group contents (ports) will be
-     * included.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param verbose Optional verbose flag that defaults to false. If the verbose flag is set to true remote group contents (ports) will be included.
      * @return A remoteProcessGroupEntity.
      */
     @GET
@@ -159,12 +152,8 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Retrieves the specified remote process group.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param verbose Optional verbose flag that defaults to false. If the
-     * verbose flag is set to true remote group contents (ports) will be
-     * included.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param verbose Optional verbose flag that defaults to false. If the verbose flag is set to true remote group contents (ports) will be included.
      * @param id The id of the remote process group to retrieve
      * @return A remoteProcessGroupEntity.
      */
@@ -206,9 +195,7 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Retrieves the specified remote process groups status history.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
     * @param id The id of the remote process group to retrieve the status for.
      * @return A statusHistoryEntity.
      */
@@ -243,12 +230,9 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Creates a new remote process group.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param uri The uri to the remote process group that is being referenced.
      * @param x The x coordinate for this funnels position.
      * @param y The y coordinate for this funnels position.
@@ -296,7 +280,7 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Creates a new remote process group.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param remoteProcessGroupEntity A remoteProcessGroupEntity.
      * @return A remoteProcessGroupEntity.
      */
@@ -409,12 +393,9 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Removes the specified remote process group.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the remote process group to be removed.
      * @return A remoteProcessGroupEntity.
      */
@@ -465,18 +446,14 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Updates the specified remote process group input port.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the remote process group to update.
      * @param portId The id of the input port to update.
      * @param isTransmitting Whether or not this port is transmitting.
      * @param isCompressed Whether or not this port should compress.
-     * @param concurrentlySchedulableTaskCount The number of concurrent tasks
-     * that should be supported
+     * @param concurrentlySchedulableTaskCount The number of concurrent tasks that should be supported
      *
      * @return A remoteProcessGroupPortEntity
      */
@@ -525,7 +502,7 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Updates the specified remote process group input port.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the remote process group to update.
      * @param portId The id of the input port to update.
      * @param remoteProcessGroupPortEntity The remoteProcessGroupPortEntity
@@ -599,18 +576,14 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Updates the specified remote process group output port.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the remote process group to update.
      * @param portId The id of the output port to update.
      * @param isTransmitting Whether or not this port is transmitting.
      * @param isCompressed Whether or not this port should compress.
-     * @param concurrentlySchedulableTaskCount The number of concurrent tasks
-     * that should be supported
+     * @param concurrentlySchedulableTaskCount The number of concurrent tasks that should be supported
      *
      * @return A remoteProcessGroupPortEntity
      */
@@ -659,7 +632,7 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Updates the specified remote process group output port.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the remote process group to update.
      * @param portId The id of the output port to update.
      * @param remoteProcessGroupPortEntity The remoteProcessGroupPortEntity
@@ -733,18 +706,14 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Updates the specified remote process group.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the remote process group to update.
      * @param isTransmitting Whether this remote process group is transmitting.
      * @param x The x coordinate for this remote process group's position.
      * @param y The y coordinate for this remote process group's position.
-     * @param communicationsTimeout The timeout to use when communication with
-     * this remote process group.
+     * @param communicationsTimeout The timeout to use when communicating with this remote process group.
      * @param yieldDuration The yield duration
      *
      * @return A remoteProcessGroupEntity.
@@ -798,7 +767,7 @@ public class RemoteProcessGroupResource extends ApplicationResource {
     /**
      * Updates the specified remote process group.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the remote process group to update.
      * @param remoteProcessGroupEntity A remoteProcessGroupEntity.
      * @return A remoteProcessGroupEntity.
@@ -847,7 +816,7 @@ public class RemoteProcessGroupResource extends ApplicationResource {
             return generateContinueResponse().build();
         }
 
-        // if the target uri is set we have to verify it here - we don't support updating the target uri on 
+        // if the target uri is set we have to verify it here - we don't support updating the target uri on
         // an existing remote process group, however if the remote process group is being created with an id
         // as is the case in clustered mode we need to verify the remote process group. treat this request as
         // though it's a new remote process group.

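The version parameter called out in the Javadoc above acts as an optimistic-locking check: an update is applied only when the client's revision matches the flow's current revision. Below is a minimal sketch of that idea in Java; the class and method names are hypothetical and this is not NiFi's actual revision-handling code.

    // Minimal sketch of the revision check described above (hypothetical names, not NiFi code).
    final class RevisionCheckSketch {

        // Rejects a request whose revision does not match the flow's current revision.
        static void verifyRevision(final Long clientVersion, final long currentVersion) {
            if (clientVersion == null || clientVersion.longValue() != currentVersion) {
                throw new IllegalStateException("Client revision " + clientVersion
                        + " is stale; current flow revision is " + currentVersion);
            }
        }
    }
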
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java
index 485b8fd..8aea04c 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/ReportingTaskResource.java
@@ -84,8 +84,8 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Populates the uri for the specified reporting task.
      *
-     * @param reportingTasks
-     * @return
+     * @param reportingTasks tasks
+     * @return tasks
      */
     private Set<ReportingTaskDTO> populateRemainingReportingTasksContent(final String availability, final Set<ReportingTaskDTO> reportingTasks) {
         for (ReportingTaskDTO reportingTask : reportingTasks) {
@@ -117,11 +117,7 @@ public class ReportingTaskResource extends ApplicationResource {
     }
 
     /**
-     * Parses the availability and ensure that the specified availability makes
-     * sense for the given NiFi instance.
-     *
-     * @param availability
-     * @return
+     * Parses the availability and ensures that the specified availability makes sense for the given NiFi instance.
      */
     private Availability parseAvailability(final String availability) {
         final Availability avail;
@@ -142,12 +138,8 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Retrieves all of the reporting tasks in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the reporting task is available on the NCM
-     * only (ncm) or on the nodes only (node). If this instance is not clustered
-     * all tasks should use the node availability.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the reporting task is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all tasks should use the node availability.
      * @return A reportingTasksEntity.
      */
     @GET
@@ -182,15 +174,10 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Creates a new reporting task.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the reporting task is available on the NCM
-     * only (ncm) or on the nodes only (node). If this instance is not clustered
-     * all tasks should use the node availability.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the reporting task is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all tasks should use the node availability.
      * @param type The type of reporting task to create.
      * @return A reportingTaskEntity.
      */
@@ -229,10 +216,8 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Creates a new Reporting Task.
      *
-     * @param httpServletRequest
-     * @param availability Whether the reporting task is available on the NCM
-     * only (ncm) or on the nodes only (node). If this instance is not clustered
-     * all tasks should use the node availability.
+     * @param httpServletRequest request
+     * @param availability Whether the reporting task is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all tasks should use the node availability.
      * @param reportingTaskEntity A reportingTaskEntity.
      * @return A reportingTaskEntity.
      */
@@ -320,12 +305,8 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Retrieves the specified reporting task.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the reporting task is available on the NCM
-     * only (ncm) or on the nodes only (node). If this instance is not clustered
-     * all tasks should use the node availability.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the reporting task is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all tasks should use the node availability.
      * @param id The id of the reporting task to retrieve
      * @return A reportingTaskEntity.
      */
@@ -362,10 +343,8 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Returns the descriptor for the specified property.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability availability
      * @param id The id of the reporting task.
      * @param propertyName The property
      * @return a propertyDescriptorEntity
@@ -411,28 +390,20 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Updates the specified reporting task.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the reporting task is available on the NCM
-     * only (ncm) or on the nodes only (node). If this instance is not clustered
-     * all tasks should use the node availability.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the reporting task is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all tasks should use the node availability.
      * @param id The id of the reporting task to update.
      * @param name The name of the reporting task
      * @param annotationData The annotation data for the reporting task
-     * @param markedForDeletion Array of property names whose value should be
-     * removed.
+     * @param markedForDeletion Array of property names whose value should be removed.
      * @param state The updated scheduled state
      * @param schedulingStrategy The scheduling strategy for this reporting task
      * @param schedulingPeriod The scheduling period for this reporting task
      * @param comments The comments for this reporting task
-     * @param formParams Additionally, the processor properties and styles are
-     * specified in the form parameters. Because the property names and styles
-     * differ from processor to processor they are specified in a map-like
-     * fashion:
+     * @param formParams Additionally, the processor properties and styles are specified in the form parameters. Because the property names and styles differ from processor to processor, they are
+     * specified in a map-like fashion:
      * <br>
      * <ul>
      * <li>properties[required.file.path]=/path/to/file</li>
@@ -519,10 +490,8 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Updates the specified Reporting Task.
      *
-     * @param httpServletRequest
-     * @param availability Whether the reporting task is available on the NCM
-     * only (ncm) or on the nodes only (node). If this instance is not clustered
-     * all tasks should use the node availability.
+     * @param httpServletRequest request
+     * @param availability Whether the reporting task is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all tasks should use the node availability.
      * @param id The id of the reporting task to update.
      * @param reportingTaskEntity A reportingTaskEntity.
      * @return A reportingTaskEntity.
@@ -597,15 +566,10 @@ public class ReportingTaskResource extends ApplicationResource {
     /**
      * Removes the specified reporting task.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param availability Whether the reporting task is available on the NCM
-     * only (ncm) or on the nodes only (node). If this instance is not clustered
-     * all tasks should use the node availability.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param availability Whether the reporting task is available on the NCM only (ncm) or on the nodes only (node). If this instance is not clustered all tasks should use the node availability.
      * @param id The id of the reporting task to remove.
      * @return A entity containing the client id and an updated revision.
      */

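The formParams documentation above says property values arrive as map-like form keys such as properties[required.file.path]=/path/to/file. The helper below is an illustrative sketch of pulling such keys out of a form-parameter map; it assumes single-valued parameters and is not the resource's actual parsing code.

    import java.util.HashMap;
    import java.util.Map;

    final class FormPropertySketch {

        // Extracts entries like properties[required.file.path]=/path/to/file into a map
        // keyed by the bare property name.
        static Map<String, String> extractProperties(final Map<String, String> formParams) {
            final Map<String, String> properties = new HashMap<>();
            for (final Map.Entry<String, String> entry : formParams.entrySet()) {
                final String key = entry.getKey();
                if (key.startsWith("properties[") && key.endsWith("]")) {
                    final String name = key.substring("properties[".length(), key.length() - 1);
                    properties.put(name, entry.getValue());
                }
            }
            return properties;
        }
    }
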
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java
index 275b133..997fe4a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SnippetResource.java
@@ -77,7 +77,7 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Get the processor resource within the specified group.
      *
-     * @return
+     * @return the processor resource within the specified group
      */
     private ProcessorResource getProcessorResource(final String groupId) {
         ProcessorResource processorResource = resourceContext.getResource(ProcessorResource.class);
@@ -88,7 +88,7 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Get the connection sub-resource within the specified group.
      *
-     * @return
+     * @return the connection sub-resource within the specified group
      */
     private ConnectionResource getConnectionResource(final String groupId) {
         ConnectionResource connectionResource = resourceContext.getResource(ConnectionResource.class);
@@ -99,7 +99,7 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Get the input ports sub-resource within the specified group.
      *
-     * @return
+     * @return the input ports sub-resource within the specified group
      */
     private InputPortResource getInputPortResource(final String groupId) {
         InputPortResource inputPortResource = resourceContext.getResource(InputPortResource.class);
@@ -110,7 +110,7 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Get the output ports sub-resource within the specified group.
      *
-     * @return
+     * @return the output ports sub-resource within the specified group
      */
     private OutputPortResource getOutputPortResource(final String groupId) {
         OutputPortResource outputPortResource = resourceContext.getResource(OutputPortResource.class);
@@ -121,7 +121,7 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Locates the label sub-resource within the specified group.
      *
-     * @return
+     * @return the label sub-resource within the specified group
      */
     private LabelResource getLabelResource(final String groupId) {
         LabelResource labelResource = resourceContext.getResource(LabelResource.class);
@@ -132,7 +132,7 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Locates the funnel sub-resource within the specified group.
      *
-     * @return
+     * @return the funnel sub-resource within the specified group
      */
     private FunnelResource getFunnelResource(final String groupId) {
         FunnelResource funnelResource = resourceContext.getResource(FunnelResource.class);
@@ -143,7 +143,7 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Locates the remote process group sub-resource within the specified group.
      *
-     * @return
+     * @return the remote process group sub-resource within the specified group
      */
     private RemoteProcessGroupResource getRemoteProcessGroupResource(final String groupId) {
         RemoteProcessGroupResource remoteProcessGroupResource = resourceContext.getResource(RemoteProcessGroupResource.class);
@@ -154,8 +154,8 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Locates the process group sub-resource within the specified group.
      *
-     * @param groupId
-     * @return
+     * @param groupId group id
+     * @return the process group sub-resource within the specified group
      */
     private ProcessGroupResource getProcessGroupResource(final String groupId) {
         ProcessGroupResource processGroupResource = resourceContext.getResource(ProcessGroupResource.class);
@@ -191,21 +191,14 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Creates a new snippet based on the specified contents.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param parentGroupId The id of the process group the components in this
-     * snippet belong to.
-     * @param linked Whether or not this snippet is linked to the underlying
-     * data flow. If a linked snippet is deleted, the components that comprise
-     * the snippet are also deleted.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param parentGroupId The id of the process group the components in this snippet belong to.
+     * @param linked Whether or not this snippet is linked to the underlying data flow. If a linked snippet is deleted, the components that comprise the snippet are also deleted.
      * @param processorIds The ids of any processors in this snippet.
      * @param processGroupIds The ids of any process groups in this snippet.
-     * @param remoteProcessGroupIds The ids of any remote process groups in this
-     * snippet.
+     * @param remoteProcessGroupIds The ids of any remote process groups in this snippet.
      * @param inputPortIds The ids of any input ports in this snippet.
      * @param outputPortIds The ids of any output ports in this snippet.
      * @param connectionIds The ids of any connections in this snippet.
@@ -266,7 +259,7 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Creates a snippet based off the specified configuration.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param snippetEntity A snippetEntity
      * @return A snippetEntity
      */
@@ -354,11 +347,8 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Retrieves the specified snippet.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param verbose Whether or not to include the contents of the snippet in
-     * the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param verbose Whether or not to include the contents of the snippet in the response.
      * @param id The id of the snippet to retrieve.
      * @return A snippetEntity.
      */
@@ -400,20 +390,13 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Updates the specified snippet.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
-     * @param verbose Whether or not to include the contents of the snippet in
-     * the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
+     * @param verbose Whether or not to include the contents of the snippet in the response.
      * @param id The id of the snippet to update.
-     * @param parentGroupId The id of the process group to move the contents of
-     * this snippet to.
-     * @param linked Whether or not this snippet is linked to the underlying
-     * data flow. If a linked snippet is deleted, the components that comprise
-     * the snippet are also deleted.
+     * @param parentGroupId The id of the process group to move the contents of this snippet to.
+     * @param linked Whether or not this snippet is linked to the underlying data flow. If a linked snippet is deleted, the components that comprise the snippet are also deleted.
      * @return A snippetEntity.
      */
     @PUT
@@ -455,10 +438,9 @@ public class SnippetResource extends ApplicationResource {
     }
 
     /**
-     * Updates the specified snippet. The contents of the snippet (component
-     * ids) cannot be updated once the snippet is created.
+     * Updates the specified snippet. The contents of the snippet (component ids) cannot be updated once the snippet is created.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param id The id of the snippet.
      * @param snippetEntity A snippetEntity
      * @return A snippetEntity
@@ -533,12 +515,9 @@ public class SnippetResource extends ApplicationResource {
     /**
      * Removes the specified snippet.
      *
-     * @param httpServletRequest
-     * @param version The revision is used to verify the client is working with
-     * the latest version of the flow.
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param version The revision is used to verify the client is working with the latest version of the flow.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the snippet to remove.
      * @return A entity containing the client id and an updated revision.
      */

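The linked flag documented above changes what deleting a snippet means: an unlinked snippet is only a grouping of component ids, while deleting a linked snippet also deletes the components themselves. The sketch below illustrates just that branch; the service interface is hypothetical, not SnippetResource's actual delete logic.

    // Hypothetical facade used only to illustrate linked vs. unlinked snippet deletion.
    interface FlowServiceSketch {
        void removeSnippet(String snippetId);              // drops the grouping only
        void removeSnippetAndComponents(String snippetId); // drops the grouping and its components
    }

    final class SnippetDeletionSketch {
        static void delete(final FlowServiceSketch service, final String snippetId, final boolean linked) {
            if (linked) {
                // Linked: the components that comprise the snippet are deleted as well.
                service.removeSnippetAndComponents(snippetId);
            } else {
                // Unlinked: only the snippet definition goes away.
                service.removeSnippet(snippetId);
            }
        }
    }
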
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java
index 915c55e..f747c47 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SystemDiagnosticsResource.java
@@ -49,9 +49,7 @@ public class SystemDiagnosticsResource extends ApplicationResource {
     /**
      * Gets the system diagnostics for this NiFi instance.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A systemDiagnosticsEntity.
      */
     @GET

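The clientId contract repeated throughout these resources, and the only parameter in the hunk above, is: use the supplied id if one is given, otherwise generate a new one, and echo whichever value was used back in the response. A minimal sketch of that rule follows; the class is hypothetical, not the resource's actual code.

    import java.util.UUID;

    final class ClientIdSketch {

        // Returns the client id to echo back in the response: the supplied one, or a fresh UUID.
        static String resolve(final String suppliedClientId) {
            return (suppliedClientId == null || suppliedClientId.trim().isEmpty())
                    ? UUID.randomUUID().toString()
                    : suppliedClientId;
        }
    }
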
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java
index 24292e9..00707be 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/TemplateResource.java
@@ -72,8 +72,8 @@ public class TemplateResource extends ApplicationResource {
     /**
      * Populates the uri for the specified templates.
      *
-     * @param templates
-     * @return
+     * @param templates templates
+     * @return templates
      */
     public Set<TemplateDTO> populateRemainingTemplatesContent(Set<TemplateDTO> templates) {
         for (TemplateDTO template : templates) {
@@ -94,9 +94,7 @@ public class TemplateResource extends ApplicationResource {
     /**
      * Retrieves all of the templates in this NiFi.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @return A templatesEntity.
      */
     @GET
@@ -130,10 +128,8 @@ public class TemplateResource extends ApplicationResource {
     /**
      * Creates a new template based on the specified template.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param name The name of the template.
      * @param description The description of the template.
      * @param snippetId The id of the snippet this template is based on.
@@ -181,10 +177,8 @@ public class TemplateResource extends ApplicationResource {
     /**
      * Imports the specified template.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param in The template stream
      * @return A templateEntity or an errorResponse XML snippet.
      */
@@ -215,7 +209,8 @@ public class TemplateResource extends ApplicationResource {
             return Response.status(Response.Status.OK).entity(responseXml).type("application/xml").build();
         } catch (Exception e) {
             logger.warn("An error occurred while importing a template.", e);
-            String responseXml = String.format("<errorResponse status=\"%s\" statusText=\"Unable to import the specified template: %s\"/>", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e.getMessage());
+            String responseXml = String.format("<errorResponse status=\"%s\" statusText=\"Unable to import the specified template: %s\"/>",
+                    Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e.getMessage());
             return Response.status(Response.Status.OK).entity(responseXml).type("application/xml").build();
         }
 
@@ -245,7 +240,7 @@ public class TemplateResource extends ApplicationResource {
     /**
      * Imports the specified template.
      *
-     * @param httpServletRequest
+     * @param httpServletRequest request
      * @param templateEntity A templateEntity.
      * @return A templateEntity.
      */
@@ -295,7 +290,8 @@ public class TemplateResource extends ApplicationResource {
             return Response.status(Response.Status.OK).entity(responseXml).type("application/xml").build();
         } catch (Exception e) {
             logger.warn("An error occurred while importing a template.", e);
-            String responseXml = String.format("<errorResponse status=\"%s\" statusText=\"Unable to import the specified template: %s\"/>", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e.getMessage());
+            String responseXml
+                    = String.format("<errorResponse status=\"%s\" statusText=\"Unable to import the specified template: %s\"/>", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e.getMessage());
             return Response.status(Response.Status.OK).entity(responseXml).type("application/xml").build();
         }
     }
@@ -303,9 +299,7 @@ public class TemplateResource extends ApplicationResource {
     /**
      * Retrieves the specified template.
      *
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the template to retrieve
      * @return A templateEntity.
      */
@@ -344,10 +338,8 @@ public class TemplateResource extends ApplicationResource {
     /**
      * Removes the specified template.
      *
-     * @param httpServletRequest
-     * @param clientId Optional client id. If the client id is not specified, a
-     * new one will be generated. This value (whether specified or generated) is
-     * included in the response.
+     * @param httpServletRequest request
+     * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
      * @param id The id of the template to remove.
      * @return A templateEntity.
      */

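To make the error path in the TemplateResource hunks above concrete, the snippet below evaluates the same format string with sample inputs; only the format string comes from the diff, while the status code and message are made up for illustration.

    public final class ErrorResponseDemo {
        public static void main(final String[] args) {
            // Same pattern as TemplateResource's import error handling; inputs are sample values.
            final String responseXml = String.format(
                    "<errorResponse status=\"%s\" statusText=\"Unable to import the specified template: %s\"/>",
                    500, "Not a valid template");
            // Prints: <errorResponse status="500" statusText="Unable to import the specified template: Not a valid template"/>
            System.out.println(responseXml);
        }
    }
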

[09/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
index 451ba57..6f228b2 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
@@ -94,26 +94,26 @@ public class UnpackContent extends AbstractProcessor {
 
     public static final String OCTET_STREAM = "application/octet-stream";
 
-    public static final PropertyDescriptor PACKAGING_FORMAT = new PropertyDescriptor.Builder().
-            name("Packaging Format").
-            description("The Packaging Format used to create the file").
-            required(true).
-            allowableValues(AUTO_DETECT_FORMAT, TAR_FORMAT, ZIP_FORMAT, FLOWFILE_STREAM_FORMAT_V3, FLOWFILE_STREAM_FORMAT_V2, FLOWFILE_TAR_FORMAT).
-            defaultValue(AUTO_DETECT_FORMAT).
-            build();
-
-    public static final Relationship REL_SUCCESS = new Relationship.Builder().
-            name("success").
-            description("Unpacked FlowFiles are sent to this relationship").
-            build();
-    public static final Relationship REL_ORIGINAL = new Relationship.Builder().
-            name("original").
-            description("The original FlowFile is sent to this relationship after it has been successfully unpacked").
-            build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder().
-            name("failure").
-            description("The original FlowFile is sent to this relationship when it cannot be unpacked for some reason").
-            build();
+    public static final PropertyDescriptor PACKAGING_FORMAT = new PropertyDescriptor.Builder()
+            .name("Packaging Format")
+            .description("The Packaging Format used to create the file")
+            .required(true)
+            .allowableValues(AUTO_DETECT_FORMAT, TAR_FORMAT, ZIP_FORMAT, FLOWFILE_STREAM_FORMAT_V3, FLOWFILE_STREAM_FORMAT_V2, FLOWFILE_TAR_FORMAT)
+            .defaultValue(AUTO_DETECT_FORMAT)
+            .build();
+
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("Unpacked FlowFiles are sent to this relationship")
+            .build();
+    public static final Relationship REL_ORIGINAL = new Relationship.Builder()
+            .name("original")
+            .description("The original FlowFile is sent to this relationship after it has been successfully unpacked")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("The original FlowFile is sent to this relationship when it cannot be unpacked for some reason")
+            .build();
 
     private Set<Relationship> relationships;
     private List<PropertyDescriptor> properties;
@@ -149,15 +149,11 @@ public class UnpackContent extends AbstractProcessor {
         }
 
         final ProcessorLog logger = getLogger();
-        String packagingFormat = context.getProperty(PACKAGING_FORMAT).
-                getValue().
-                toLowerCase();
+        String packagingFormat = context.getProperty(PACKAGING_FORMAT).getValue().toLowerCase();
         if (AUTO_DETECT_FORMAT.equals(packagingFormat)) {
-            final String mimeType = flowFile.
-                    getAttribute(CoreAttributes.MIME_TYPE.key());
+            final String mimeType = flowFile.getAttribute(CoreAttributes.MIME_TYPE.key());
             if (mimeType == null) {
-                logger.
-                        error("No mime.type attribute set for {}; routing to failure", new Object[]{flowFile});
+                logger.error("No mime.type attribute set for {}; routing to failure", new Object[]{flowFile});
                 session.transfer(flowFile, REL_FAILURE);
                 return;
             }
@@ -179,8 +175,7 @@ public class UnpackContent extends AbstractProcessor {
                     packagingFormat = FLOWFILE_TAR_FORMAT;
                     break;
                 default: {
-                    logger.
-                            info("Cannot unpack {} because its mime.type attribute is set to '{}', which is not a format that can be unpacked; routing to 'success'", new Object[]{flowFile, mimeType});
+                    logger.info("Cannot unpack {} because its mime.type attribute is set to '{}', which is not a format that can be unpacked; routing to 'success'", new Object[]{flowFile, mimeType});
                     session.transfer(flowFile, REL_SUCCESS);
                     return;
                 }
@@ -211,17 +206,14 @@ public class UnpackContent extends AbstractProcessor {
                 addFragmentAttrs = false;
                 break;
             default:
-                throw new AssertionError("Packaging Format was " + context.
-                        getProperty(PACKAGING_FORMAT).
-                        getValue());
+                throw new AssertionError("Packaging Format was " + context.getProperty(PACKAGING_FORMAT).getValue());
         }
 
         final List<FlowFile> unpacked = new ArrayList<>();
         try {
             unpacker.unpack(session, flowFile, unpacked);
             if (unpacked.isEmpty()) {
-                logger.
-                        error("Unable to unpack {} because it does not appear to have any entries; routing to failure", new Object[]{flowFile});
+                logger.error("Unable to unpack {} because it does not appear to have any entries; routing to failure", new Object[]{flowFile});
                 session.transfer(flowFile, REL_FAILURE);
                 return;
             }
@@ -231,13 +223,10 @@ public class UnpackContent extends AbstractProcessor {
             }
             session.transfer(unpacked, REL_SUCCESS);
             session.transfer(flowFile, REL_ORIGINAL);
-            session.getProvenanceReporter().
-                    fork(flowFile, unpacked);
-            logger.
-                    info("Unpacked {} into {} and transferred to success", new Object[]{flowFile, unpacked});
+            session.getProvenanceReporter().fork(flowFile, unpacked);
+            logger.info("Unpacked {} into {} and transferred to success", new Object[]{flowFile, unpacked});
         } catch (final ProcessException e) {
-            logger.
-                    error("Unable to unpack {} due to {}; routing to failure", new Object[]{flowFile, e});
+            logger.error("Unable to unpack {} due to {}; routing to failure", new Object[]{flowFile, e});
             session.transfer(flowFile, REL_FAILURE);
             session.remove(unpacked);
         }
@@ -252,8 +241,7 @@ public class UnpackContent extends AbstractProcessor {
 
         @Override
         public void unpack(final ProcessSession session, final FlowFile source, final List<FlowFile> unpacked) {
-            final String fragmentId = UUID.randomUUID().
-                    toString();
+            final String fragmentId = UUID.randomUUID().toString();
             session.read(source, new InputStreamCallback() {
                 @Override
                 public void process(final InputStream in) throws IOException {
@@ -268,38 +256,28 @@ public class UnpackContent extends AbstractProcessor {
                             final Path filePath = file.toPath();
                             final String filePathString = filePath.getParent() + "/";
                             final Path absPath = filePath.toAbsolutePath();
-                            final String absPathString = absPath.getParent().
-                                    toString() + "/";
+                            final String absPathString = absPath.getParent().toString() + "/";
 
                             FlowFile unpackedFile = session.create(source);
                             try {
                                 final Map<String, String> attributes = new HashMap<>();
-                                attributes.
-                                        put(CoreAttributes.FILENAME.key(), file.
-                                                getName());
-                                attributes.
-                                        put(CoreAttributes.PATH.key(), filePathString);
-                                attributes.put(CoreAttributes.ABSOLUTE_PATH.
-                                        key(), absPathString);
-                                attributes.
-                                        put(CoreAttributes.MIME_TYPE.key(), OCTET_STREAM);
+                                attributes.put(CoreAttributes.FILENAME.key(), file.getName());
+                                attributes.put(CoreAttributes.PATH.key(), filePathString);
+                                attributes.put(CoreAttributes.ABSOLUTE_PATH.key(), absPathString);
+                                attributes.put(CoreAttributes.MIME_TYPE.key(), OCTET_STREAM);
 
                                 attributes.put(FRAGMENT_ID, fragmentId);
-                                attributes.put(FRAGMENT_INDEX, String.
-                                        valueOf(++fragmentCount));
+                                attributes.put(FRAGMENT_INDEX, String.valueOf(++fragmentCount));
 
-                                unpackedFile = session.
-                                        putAllAttributes(unpackedFile, attributes);
+                                unpackedFile = session.putAllAttributes(unpackedFile, attributes);
 
                                 final long fileSize = tarEntry.getSize();
-                                unpackedFile = session.
-                                        write(unpackedFile, new OutputStreamCallback() {
-                                            @Override
-                                            public void process(final OutputStream out) throws IOException {
-                                                StreamUtils.
-                                                copy(tarIn, out, fileSize);
-                                            }
-                                        });
+                                unpackedFile = session.write(unpackedFile, new OutputStreamCallback() {
+                                    @Override
+                                    public void process(final OutputStream out) throws IOException {
+                                        StreamUtils.copy(tarIn, out, fileSize);
+                                    }
+                                });
                             } finally {
                                 unpacked.add(unpackedFile);
                             }
@@ -314,8 +292,7 @@ public class UnpackContent extends AbstractProcessor {
 
         @Override
         public void unpack(final ProcessSession session, final FlowFile source, final List<FlowFile> unpacked) {
-            final String fragmentId = UUID.randomUUID().
-                    toString();
+            final String fragmentId = UUID.randomUUID().toString();
             session.read(source, new InputStreamCallback() {
                 @Override
                 public void process(final InputStream in) throws IOException {
@@ -327,39 +304,28 @@ public class UnpackContent extends AbstractProcessor {
                                 continue;
                             }
                             final File file = new File(zipEntry.getName());
-                            final String parentDirectory = (file.getParent() == null) ? "/" : file.
-                                    getParent();
-                            final Path absPath = file.toPath().
-                                    toAbsolutePath();
-                            final String absPathString = absPath.getParent().
-                                    toString() + "/";
+                            final String parentDirectory = (file.getParent() == null) ? "/" : file.getParent();
+                            final Path absPath = file.toPath().toAbsolutePath();
+                            final String absPathString = absPath.getParent().toString() + "/";
 
                             FlowFile unpackedFile = session.create(source);
                             try {
                                 final Map<String, String> attributes = new HashMap<>();
-                                attributes.
-                                        put(CoreAttributes.FILENAME.key(), file.
-                                                getName());
-                                attributes.
-                                        put(CoreAttributes.PATH.key(), parentDirectory);
-                                attributes.put(CoreAttributes.ABSOLUTE_PATH.
-                                        key(), absPathString);
-                                attributes.
-                                        put(CoreAttributes.MIME_TYPE.key(), OCTET_STREAM);
+                                attributes.put(CoreAttributes.FILENAME.key(), file.getName());
+                                attributes.put(CoreAttributes.PATH.key(), parentDirectory);
+                                attributes.put(CoreAttributes.ABSOLUTE_PATH.key(), absPathString);
+                                attributes.put(CoreAttributes.MIME_TYPE.key(), OCTET_STREAM);
 
                                 attributes.put(FRAGMENT_ID, fragmentId);
-                                attributes.put(FRAGMENT_INDEX, String.
-                                        valueOf(++fragmentCount));
-
-                                unpackedFile = session.
-                                        putAllAttributes(unpackedFile, attributes);
-                                unpackedFile = session.
-                                        write(unpackedFile, new OutputStreamCallback() {
-                                            @Override
-                                            public void process(final OutputStream out) throws IOException {
-                                                StreamUtils.copy(zipIn, out);
-                                            }
-                                        });
+                                attributes.put(FRAGMENT_INDEX, String.valueOf(++fragmentCount));
+
+                                unpackedFile = session.putAllAttributes(unpackedFile, attributes);
+                                unpackedFile = session.write(unpackedFile, new OutputStreamCallback() {
+                                    @Override
+                                    public void process(final OutputStream out) throws IOException {
+                                        StreamUtils.copy(zipIn, out);
+                                    }
+                                });
                             } finally {
                                 unpacked.add(unpackedFile);
                             }
@@ -388,24 +354,20 @@ public class UnpackContent extends AbstractProcessor {
                             final ObjectHolder<Map<String, String>> attributesRef = new ObjectHolder<>(null);
                             FlowFile unpackedFile = session.create(source);
                             try {
-                                unpackedFile = session.
-                                        write(unpackedFile, new OutputStreamCallback() {
-                                            @Override
-                                            public void process(final OutputStream rawOut) throws IOException {
-                                                try (final OutputStream out = new BufferedOutputStream(rawOut)) {
-                                                    final Map<String, String> attributes = unpackager.
-                                                    unpackageFlowFile(in, out);
-                                                    if (attributes == null) {
-                                                        throw new IOException("Failed to unpack " + source + ": stream had no Attributes");
-                                                    }
-                                                    attributesRef.
-                                                    set(attributes);
-                                                }
+                                unpackedFile = session.write(unpackedFile, new OutputStreamCallback() {
+                                    @Override
+                                    public void process(final OutputStream rawOut) throws IOException {
+                                        try (final OutputStream out = new BufferedOutputStream(rawOut)) {
+                                            final Map<String, String> attributes = unpackager.unpackageFlowFile(in, out);
+                                            if (attributes == null) {
+                                                throw new IOException("Failed to unpack " + source + ": stream had no Attributes");
                                             }
-                                        });
+                                            attributesRef.set(attributes);
+                                        }
+                                    }
+                                });
 
-                                final Map<String, String> attributes = attributesRef.
-                                        get();
+                                final Map<String, String> attributes = attributesRef.get();
 
                                 // Remove the UUID from the attributes because we don't want to use the same UUID for this FlowFile.
                                 // If we do, then we get into a weird situation if we use MergeContent to create a FlowFile Package
@@ -413,24 +375,17 @@ public class UnpackContent extends AbstractProcessor {
                                 attributes.remove(CoreAttributes.UUID.key());
 
                                 // maintain backward compatibility with legacy NiFi attribute names
-                                mapAttributes(attributes, "nf.file.name", CoreAttributes.FILENAME.
-                                        key());
-                                mapAttributes(attributes, "nf.file.path", CoreAttributes.PATH.
-                                        key());
-                                mapAttributes(attributes, "content-encoding", CoreAttributes.MIME_TYPE.
-                                        key());
-                                mapAttributes(attributes, "content-type", CoreAttributes.MIME_TYPE.
-                                        key());
+                                mapAttributes(attributes, "nf.file.name", CoreAttributes.FILENAME.key());
+                                mapAttributes(attributes, "nf.file.path", CoreAttributes.PATH.key());
+                                mapAttributes(attributes, "content-encoding", CoreAttributes.MIME_TYPE.key());
+                                mapAttributes(attributes, "content-type", CoreAttributes.MIME_TYPE.key());
 
                                 if (!attributes.
-                                        containsKey(CoreAttributes.MIME_TYPE.
-                                                key())) {
-                                    attributes.put(CoreAttributes.MIME_TYPE.
-                                            key(), OCTET_STREAM);
+                                        containsKey(CoreAttributes.MIME_TYPE.key())) {
+                                    attributes.put(CoreAttributes.MIME_TYPE.key(), OCTET_STREAM);
                                 }
 
-                                unpackedFile = session.
-                                        putAllAttributes(unpackedFile, attributes);
+                                unpackedFile = session.putAllAttributes(unpackedFile, attributes);
                             } finally {
                                 unpacked.add(unpackedFile);
                             }
@@ -455,8 +410,7 @@ public class UnpackContent extends AbstractProcessor {
     }
 
     /**
-     * If the unpacked flowfiles contain fragment index attributes, then we need
-     * to apply fragment count and other attributes for completeness.
+     * If the unpacked flowfiles contain fragment index attributes, then we need to apply fragment count and other attributes for completeness.
      *
      * @param session
      * @param source
@@ -474,12 +428,9 @@ public class UnpackContent extends AbstractProcessor {
             }
         }
 
-        String originalFilename = source.getAttribute(CoreAttributes.FILENAME.
-                key());
-        if (originalFilename.endsWith(".tar") || originalFilename.
-                endsWith(".zip") || originalFilename.endsWith(".pkg")) {
-            originalFilename = originalFilename.substring(0, originalFilename.
-                    length() - 4);
+        String originalFilename = source.getAttribute(CoreAttributes.FILENAME.key());
+        if (originalFilename.endsWith(".tar") || originalFilename.endsWith(".zip") || originalFilename.endsWith(".pkg")) {
+            originalFilename = originalFilename.substring(0, originalFilename.length() - 4);
         }
 
         // second pass adds fragment attributes

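Aside on the mapAttributes(...) calls above: the private helper's body is not shown in this hunk, so the following standalone sketch is only one plausible reading of that legacy-name mapping, using plain JDK maps. The class name, helper name, and the "filename"/"mime.type" target keys are illustrative assumptions, not taken from the diff.

    import java.util.HashMap;
    import java.util.Map;

    public class LegacyAttributeMapping {

        // Copies a legacy attribute to its current name if the legacy key is present
        // (a stand-in for the processor's private mapAttributes helper; the real
        // helper's exact semantics are not visible in this hunk).
        static void mapAttribute(final Map<String, String> attributes, final String legacyKey, final String currentKey) {
            final String value = attributes.get(legacyKey);
            if (value != null) {
                attributes.put(currentKey, value);
            }
        }

        public static void main(final String[] args) {
            final Map<String, String> attributes = new HashMap<>();
            attributes.put("nf.file.name", "sample.txt");
            mapAttribute(attributes, "nf.file.name", "filename");  // legacy name copied to current name
            mapAttribute(attributes, "content-type", "mime.type"); // absent legacy keys are simply ignored
            System.out.println(attributes);
        }
    }
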
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
index 40c7e65..ab12be2 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
@@ -59,33 +59,24 @@ public class ContentAcknowledgmentServlet extends HttpServlet {
     @Override
     public void init(final ServletConfig config) throws ServletException {
         final ServletContext context = config.getServletContext();
-        this.processor = (Processor) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_PROCESSOR);
-        this.logger = (ProcessorLog) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_LOGGER);
-        this.authorizedPattern = (Pattern) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_AUTHORITY_PATTERN);
-        this.flowFileMap = (ConcurrentMap<String, FlowFileEntryTimeWrapper>) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_FLOWFILE_MAP);
+        this.processor = (Processor) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_PROCESSOR);
+        this.logger = (ProcessorLog) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_LOGGER);
+        this.authorizedPattern = (Pattern) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_AUTHORITY_PATTERN);
+        this.flowFileMap = (ConcurrentMap<String, FlowFileEntryTimeWrapper>) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_FLOWFILE_MAP);
     }
 
     @Override
     protected void doDelete(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
-        final X509Certificate[] certs = (X509Certificate[]) request.
-                getAttribute("javax.servlet.request.X509Certificate");
+        final X509Certificate[] certs = (X509Certificate[]) request.getAttribute("javax.servlet.request.X509Certificate");
         String foundSubject = DEFAULT_FOUND_SUBJECT;
         if (certs != null && certs.length > 0) {
             for (final X509Certificate cert : certs) {
-                foundSubject = cert.getSubjectDN().
-                        getName();
-                if (authorizedPattern.matcher(foundSubject).
-                        matches()) {
+                foundSubject = cert.getSubjectDN().getName();
+                if (authorizedPattern.matcher(foundSubject).matches()) {
                     break;
                 } else {
-                    logger.
-                            warn(processor + " rejecting transfer attempt from " + foundSubject + " because the DN is not authorized");
-                    response.
-                            sendError(HttpServletResponse.SC_FORBIDDEN, "not allowed based on dn");
+                    logger.warn(processor + " rejecting transfer attempt from " + foundSubject + " because the DN is not authorized");
+                    response.sendError(HttpServletResponse.SC_FORBIDDEN, "not allowed based on dn");
                     return;
                 }
             }
@@ -101,10 +92,8 @@ public class ContentAcknowledgmentServlet extends HttpServlet {
         final String uuid = uri.substring(slashIndex + 1, questionIndex);
         final FlowFileEntryTimeWrapper timeWrapper = flowFileMap.remove(uuid);
         if (timeWrapper == null) {
-            logger.
-                    warn("received DELETE for HOLD with ID " + uuid + " from Remote Host: [" + request.
-                            getRemoteHost() + "] Port [" + request.
-                            getRemotePort() + "] SubjectDN [" + foundSubject + "], but no HOLD exists with that ID; sending response with Status Code 404");
+            logger.warn("received DELETE for HOLD with ID " + uuid + " from Remote Host: [" + request.getRemoteHost()
+                    + "] Port [" + request.getRemotePort() + "] SubjectDN [" + foundSubject + "], but no HOLD exists with that ID; sending response with Status Code 404");
             response.sendError(HttpServletResponse.SC_NOT_FOUND);
             return;
         }
@@ -112,8 +101,7 @@ public class ContentAcknowledgmentServlet extends HttpServlet {
         try {
             final Set<FlowFile> flowFiles = timeWrapper.getFlowFiles();
 
-            final long transferTime = System.currentTimeMillis() - timeWrapper.
-                    getEntryTime();
+            final long transferTime = System.currentTimeMillis() - timeWrapper.getEntryTime();
             long totalFlowFileSize = 0;
             for (final FlowFile flowFile : flowFiles) {
                 totalFlowFileSize += flowFile.getSize();
@@ -124,13 +112,11 @@ public class ContentAcknowledgmentServlet extends HttpServlet {
                 seconds = .00000001D;
             }
             final double bytesPerSecond = ((double) totalFlowFileSize / seconds);
-            final String transferRate = FormatUtils.
-                    formatDataSize(bytesPerSecond) + "/sec";
+            final String transferRate = FormatUtils.formatDataSize(bytesPerSecond) + "/sec";
 
-            logger.
-                    info("received {} files/{} bytes from Remote Host: [{}] Port [{}] SubjectDN [{}] in {} milliseconds at a rate of {}; transferring to 'success': {}",
-                            new Object[]{flowFiles.size(), totalFlowFileSize, request.
-                                getRemoteHost(), request.getRemotePort(), foundSubject, transferTime, transferRate, flowFiles});
+            logger.info("received {} files/{} bytes from Remote Host: [{}] Port [{}] SubjectDN [{}] in {} milliseconds at a rate of {}; "
+                    + "transferring to 'success': {}",
+                    new Object[]{flowFiles.size(), totalFlowFileSize, request.getRemoteHost(), request.getRemotePort(), foundSubject, transferTime, transferRate, flowFiles});
 
             final ProcessSession session = timeWrapper.getSession();
             session.transfer(flowFiles, ListenHTTP.RELATIONSHIP_SUCCESS);
@@ -139,12 +125,9 @@ public class ContentAcknowledgmentServlet extends HttpServlet {
             response.setStatus(HttpServletResponse.SC_OK);
             response.flushBuffer();
         } catch (final Throwable t) {
-            timeWrapper.getSession().
-                    rollback();
-            logger.
-                    error("received DELETE for HOLD with ID {} from Remote Host: [{}] Port [{}] SubjectDN [{}], but failed to process the request due to {}",
-                            new Object[]{uuid, request.getRemoteHost(), request.
-                                getRemotePort(), foundSubject, t.toString()});
+            timeWrapper.getSession().rollback();
+            logger.error("received DELETE for HOLD with ID {} from Remote Host: [{}] Port [{}] SubjectDN [{}], but failed to process the request due to {}",
+                    new Object[]{uuid, request.getRemoteHost(), request.getRemotePort(), foundSubject, t.toString()});
             if (logger.isDebugEnabled()) {
                 logger.error("", t);
             }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
index 7e2338a..81986ba 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java
@@ -103,20 +103,13 @@ public class ListenHTTPServlet extends HttpServlet {
     @Override
     public void init(final ServletConfig config) throws ServletException {
         final ServletContext context = config.getServletContext();
-        this.logger = (ProcessorLog) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_LOGGER);
-        this.sessionFactoryHolder = (AtomicReference<ProcessSessionFactory>) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_SESSION_FACTORY_HOLDER);
-        this.processContext = (ProcessContext) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_PROCESS_CONTEXT_HOLDER);
-        this.authorizedPattern = (Pattern) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_AUTHORITY_PATTERN);
-        this.headerPattern = (Pattern) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_HEADER_PATTERN);
-        this.flowFileMap = (ConcurrentMap<String, FlowFileEntryTimeWrapper>) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_FLOWFILE_MAP);
-        this.streamThrottler = (StreamThrottler) context.
-                getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_STREAM_THROTTLER);
+        this.logger = (ProcessorLog) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_LOGGER);
+        this.sessionFactoryHolder = (AtomicReference<ProcessSessionFactory>) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_SESSION_FACTORY_HOLDER);
+        this.processContext = (ProcessContext) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_PROCESS_CONTEXT_HOLDER);
+        this.authorizedPattern = (Pattern) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_AUTHORITY_PATTERN);
+        this.headerPattern = (Pattern) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_HEADER_PATTERN);
+        this.flowFileMap = (ConcurrentMap<String, FlowFileEntryTimeWrapper>) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_FLOWFILE_MAP);
+        this.streamThrottler = (StreamThrottler) context.getAttribute(ListenHTTP.CONTEXT_ATTRIBUTE_STREAM_THROTTLER);
     }
 
     @Override
@@ -148,15 +141,12 @@ public class ListenHTTPServlet extends HttpServlet {
         try {
             final long n = filesReceived.getAndIncrement() % FILES_BEFORE_CHECKING_DESTINATION_SPACE;
             if (n == 0 || !spaceAvailable.get()) {
-                if (context.getAvailableRelationships().
-                        isEmpty()) {
+                if (context.getAvailableRelationships().isEmpty()) {
                     spaceAvailable.set(false);
                     if (logger.isDebugEnabled()) {
-                        logger.debug("Received request from " + request.
-                                getRemoteHost() + " but no space available; Indicating Service Unavailable");
+                        logger.debug("Received request from " + request.getRemoteHost() + " but no space available; Indicating Service Unavailable");
                     }
-                    response.
-                            sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
+                    response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
                     return;
                 } else {
                     spaceAvailable.set(true);
@@ -164,32 +154,24 @@ public class ListenHTTPServlet extends HttpServlet {
             }
             response.setHeader("Content-Type", MediaType.TEXT_PLAIN);
 
-            final boolean contentGzipped = Boolean.parseBoolean(request.
-                    getHeader(GZIPPED_HEADER));
+            final boolean contentGzipped = Boolean.parseBoolean(request.getHeader(GZIPPED_HEADER));
 
-            final X509Certificate[] certs = (X509Certificate[]) request.
-                    getAttribute("javax.servlet.request.X509Certificate");
+            final X509Certificate[] certs = (X509Certificate[]) request.getAttribute("javax.servlet.request.X509Certificate");
             foundSubject = DEFAULT_FOUND_SUBJECT;
             if (certs != null && certs.length > 0) {
                 for (final X509Certificate cert : certs) {
-                    foundSubject = cert.getSubjectDN().
-                            getName();
-                    if (authorizedPattern.matcher(foundSubject).
-                            matches()) {
+                    foundSubject = cert.getSubjectDN().getName();
+                    if (authorizedPattern.matcher(foundSubject).matches()) {
                         break;
                     } else {
-                        logger.
-                                warn("Rejecting transfer attempt from " + foundSubject + " because the DN is not authorized, host=" + request.
-                                        getRemoteHost());
-                        response.
-                                sendError(HttpServletResponse.SC_FORBIDDEN, "not allowed based on dn");
+                        logger.warn("Rejecting transfer attempt from " + foundSubject + " because the DN is not authorized, host=" + request.getRemoteHost());
+                        response.sendError(HttpServletResponse.SC_FORBIDDEN, "not allowed based on dn");
                         return;
                     }
                 }
             }
 
-            final String destinationVersion = request.
-                    getHeader(PROTOCOL_VERSION_HEADER);
+            final String destinationVersion = request.getHeader(PROTOCOL_VERSION_HEADER);
             Integer protocolVersion = null;
             if (destinationVersion != null) {
                 try {
@@ -200,19 +182,15 @@ public class ListenHTTPServlet extends HttpServlet {
             }
 
             final boolean destinationIsLegacyNiFi = (protocolVersion == null);
-            final boolean createHold = Boolean.parseBoolean(request.
-                    getHeader(FLOWFILE_CONFIRMATION_HEADER));
+            final boolean createHold = Boolean.parseBoolean(request.getHeader(FLOWFILE_CONFIRMATION_HEADER));
             final String contentType = request.getContentType();
 
-            final InputStream unthrottled = contentGzipped ? new GZIPInputStream(request.
-                    getInputStream()) : request.getInputStream();
+            final InputStream unthrottled = contentGzipped ? new GZIPInputStream(request.getInputStream()) : request.getInputStream();
 
-            final InputStream in = (streamThrottler == null) ? unthrottled : streamThrottler.
-                    newThrottledInputStream(unthrottled);
+            final InputStream in = (streamThrottler == null) ? unthrottled : streamThrottler.newThrottledInputStream(unthrottled);
 
             if (logger.isDebugEnabled()) {
-                logger.
-                        debug("Received request from " + request.getRemoteHost() + ", createHold=" + createHold + ", content-type=" + contentType + ", gzip=" + contentGzipped);
+                logger.debug("Received request from " + request.getRemoteHost() + ", createHold=" + createHold + ", content-type=" + contentType + ", gzip=" + contentGzipped);
             }
 
             final AtomicBoolean hasMoreData = new AtomicBoolean(false);
@@ -241,21 +219,16 @@ public class ListenHTTPServlet extends HttpServlet {
                                 IOUtils.copy(in, bos);
                                 hasMoreData.set(false);
                             } else {
-                                attributes.putAll(unpackager.
-                                        unpackageFlowFile(in, bos));
+                                attributes.putAll(unpackager.unpackageFlowFile(in, bos));
 
                                 if (destinationIsLegacyNiFi) {
                                     if (attributes.containsKey("nf.file.name")) {
                                         // for backward compatibility with old nifi...
-                                        attributes.put(CoreAttributes.FILENAME.
-                                                key(), attributes.
-                                                remove("nf.file.name"));
+                                        attributes.put(CoreAttributes.FILENAME.key(), attributes.remove("nf.file.name"));
                                     }
 
                                     if (attributes.containsKey("nf.file.path")) {
-                                        attributes.
-                                                put(CoreAttributes.PATH.key(), attributes.
-                                                        remove("nf.file.path"));
+                                        attributes.put(CoreAttributes.PATH.key(), attributes.remove("nf.file.path"));
                                     }
                                 }
 
@@ -269,12 +242,10 @@ public class ListenHTTPServlet extends HttpServlet {
                 });
 
                 final long transferNanos = System.nanoTime() - startNanos;
-                final long transferMillis = TimeUnit.MILLISECONDS.
-                        convert(transferNanos, TimeUnit.NANOSECONDS);
+                final long transferMillis = TimeUnit.MILLISECONDS.convert(transferNanos, TimeUnit.NANOSECONDS);
 
                 // put metadata on flowfile
-                final String nameVal = request.
-                        getHeader(CoreAttributes.FILENAME.key());
+                final String nameVal = request.getHeader(CoreAttributes.FILENAME.key());
                 if (StringUtils.isNotBlank(nameVal)) {
                     attributes.put(CoreAttributes.FILENAME.key(), nameVal);
                 }
@@ -283,31 +254,24 @@ public class ListenHTTPServlet extends HttpServlet {
                 for (Enumeration<String> headerEnum = request.getHeaderNames();
                         headerEnum.hasMoreElements();) {
                     String headerName = headerEnum.nextElement();
-                    if (headerPattern != null && headerPattern.
-                            matcher(headerName).
-                            matches()) {
+                    if (headerPattern != null && headerPattern.matcher(headerName).matches()) {
                         String headerValue = request.getHeader(headerName);
                         attributes.put(headerName, headerValue);
                     }
                 }
 
-                String sourceSystemFlowFileIdentifier = attributes.
-                        get(CoreAttributes.UUID.key());
+                String sourceSystemFlowFileIdentifier = attributes.get(CoreAttributes.UUID.key());
                 if (sourceSystemFlowFileIdentifier != null) {
                     sourceSystemFlowFileIdentifier = "urn:nifi:" + sourceSystemFlowFileIdentifier;
 
                     // If we received a UUID, we want to give the FlowFile a new UUID and register the sending system's
                     // identifier as the SourceSystemFlowFileIdentifier field in the Provenance RECEIVE event
-                    attributes.put(CoreAttributes.UUID.key(), UUID.randomUUID().
-                            toString());
+                    attributes.put(CoreAttributes.UUID.key(), UUID.randomUUID().toString());
                 }
 
                 flowFile = session.putAllAttributes(flowFile, attributes);
-                session.getProvenanceReporter().
-                        receive(flowFile, request.getRequestURL().
-                                toString(), sourceSystemFlowFileIdentifier, "Remote DN=" + foundSubject, transferMillis);
-                flowFile = session.
-                        putAttribute(flowFile, "restlistener.remote.user.dn", foundSubject);
+                session.getProvenanceReporter().receive(flowFile, request.getRequestURL().toString(), sourceSystemFlowFileIdentifier, "Remote DN=" + foundSubject, transferMillis);
+                flowFile = session.putAttribute(flowFile, "restlistener.remote.user.dn", foundSubject);
                 flowFileSet.add(flowFile);
 
                 if (holdUuid == null) {
@@ -316,45 +280,34 @@ public class ListenHTTPServlet extends HttpServlet {
             } while (hasMoreData.get());
 
             if (createHold) {
-                String uuid = (holdUuid == null) ? UUID.randomUUID().
-                        toString() : holdUuid;
+                String uuid = (holdUuid == null) ? UUID.randomUUID().toString() : holdUuid;
 
                 if (flowFileMap.containsKey(uuid)) {
-                    uuid = UUID.randomUUID().
-                            toString();
+                    uuid = UUID.randomUUID().toString();
                 }
 
-                final FlowFileEntryTimeWrapper wrapper = new FlowFileEntryTimeWrapper(session, flowFileSet, System.
-                        currentTimeMillis());
+                final FlowFileEntryTimeWrapper wrapper = new FlowFileEntryTimeWrapper(session, flowFileSet, System.currentTimeMillis());
                 FlowFileEntryTimeWrapper previousWrapper;
                 do {
                     previousWrapper = flowFileMap.putIfAbsent(uuid, wrapper);
                     if (previousWrapper != null) {
-                        uuid = UUID.randomUUID().
-                                toString();
+                        uuid = UUID.randomUUID().toString();
                     }
                 } while (previousWrapper != null);
 
                 response.setStatus(HttpServletResponse.SC_SEE_OTHER);
                 final String ackUri = ListenHTTP.URI + "/holds/" + uuid;
                 response.addHeader(LOCATION_HEADER_NAME, ackUri);
-                response.
-                        addHeader(LOCATION_URI_INTENT_NAME, LOCATION_URI_INTENT_VALUE);
-                response.getOutputStream().
-                        write(ackUri.getBytes("UTF-8"));
+                response.addHeader(LOCATION_URI_INTENT_NAME, LOCATION_URI_INTENT_VALUE);
+                response.getOutputStream().write(ackUri.getBytes("UTF-8"));
                 if (logger.isDebugEnabled()) {
-                    logger.
-                            debug("Ingested {} from Remote Host: [{}] Port [{}] SubjectDN [{}]; placed hold on these {} files with ID {}",
-                                    new Object[]{flowFileSet, request.
-                                        getRemoteHost(), request.getRemotePort(), foundSubject, flowFileSet.
-                                        size(), uuid});
+                    logger.debug("Ingested {} from Remote Host: [{}] Port [{}] SubjectDN [{}]; placed hold on these {} files with ID {}",
+                            new Object[]{flowFileSet, request.getRemoteHost(), request.getRemotePort(), foundSubject, flowFileSet.size(), uuid});
                 }
             } else {
                 response.setStatus(HttpServletResponse.SC_OK);
-                logger.
-                        info("Received from Remote Host: [{}] Port [{}] SubjectDN [{}]; transferring to 'success' {}",
-                                new Object[]{request.getRemoteHost(), request.
-                                    getRemotePort(), foundSubject, flowFile});
+                logger.info("Received from Remote Host: [{}] Port [{}] SubjectDN [{}]; transferring to 'success' {}",
+                        new Object[]{request.getRemoteHost(), request.getRemotePort(), foundSubject, flowFile});
 
                 session.transfer(flowFileSet, ListenHTTP.RELATIONSHIP_SUCCESS);
                 session.commit();
@@ -362,16 +315,13 @@ public class ListenHTTPServlet extends HttpServlet {
         } catch (final Throwable t) {
             session.rollback();
             if (flowFile == null) {
-                logger.
-                        error("Unable to receive file from Remote Host: [{}] SubjectDN [{}] due to {}", new Object[]{request.
-                            getRemoteHost(), foundSubject, t});
+                logger.error("Unable to receive file from Remote Host: [{}] SubjectDN [{}] due to {}",
+                        new Object[]{request.getRemoteHost(), foundSubject, t});
             } else {
-                logger.
-                        error("Unable to receive file {} from Remote Host: [{}] SubjectDN [{}] due to {}", new Object[]{flowFile, request.
-                            getRemoteHost(), foundSubject, t});
+                logger.error("Unable to receive file {} from Remote Host: [{}] SubjectDN [{}] due to {}",
+                        new Object[]{flowFile, request.getRemoteHost(), foundSubject, t});
             }
-            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.
-                    toString());
+            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.toString());
         }
     }
 }

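Aside on the hold-creation branch above: it keeps regenerating its UUID until putIfAbsent wins, so the key placed in the flowFileMap is guaranteed unique even with concurrent requests. A minimal standalone sketch of that claim-a-unique-key loop, using plain JDK collections (class and method names invented for illustration):

    import java.util.UUID;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class UniqueHoldId {

        // Keeps generating a new UUID until putIfAbsent succeeds, so the returned
        // key is unique in the map even when other threads are inserting holds.
        static String claimHold(final ConcurrentMap<String, String> holds, final String value) {
            String uuid = UUID.randomUUID().toString();
            String previous;
            do {
                previous = holds.putIfAbsent(uuid, value);
                if (previous != null) {
                    uuid = UUID.randomUUID().toString(); // key collision; try another
                }
            } while (previous != null);
            return uuid;
        }

        public static void main(final String[] args) {
            final ConcurrentMap<String, String> holds = new ConcurrentHashMap<>();
            System.out.println("hold id: " + claimHold(holds, "flowfile-set-1"));
        }
    }
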
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java
index aa5cdc3..c9d906d 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/Bin.java
@@ -46,8 +46,7 @@ public class Bin {
      * @param minEntries
      * @param maxEntries
      * @param fileCountAttribute
-     * @throws IllegalArgumentException if the min is not less than or equal to
-     * the max.
+     * @throws IllegalArgumentException if the min is not less than or equal to the max.
      */
     public Bin(final long minSizeBytes, final long maxSizeBytes, final int minEntries, final int maxEntries, final String fileCountAttribute) {
         this.minimumSizeBytes = minSizeBytes;
@@ -63,11 +62,8 @@ public class Bin {
     }
 
     /**
-     * Indicates whether the bin has enough items to be considered full. This is
-     * based on whether the current size of the bin is greater than the minimum
-     * size in bytes and based on having a number of successive unsuccessful
-     * attempts to add a new item (because it is so close to the max or the size
-     * of the objects being attempted do not favor tight packing)
+     * Indicates whether the bin has enough items to be considered full. This is based on whether the current size of the bin is greater than the minimum size in bytes and based on having a number of
+     * successive unsuccessful attempts to add a new item (because it is so close to the max or the size of the objects being attempted do not favor tight packing)
      *
      * @return true if considered full; false otherwise
      */
@@ -90,8 +86,7 @@ public class Bin {
      *
      * @param duration
      * @param unit
-     * @return true if this bin is older than the length of time given; false
-     * otherwise
+     * @return true if this bin is older than the length of time given; false otherwise
      */
     public boolean isOlderThan(final int duration, final TimeUnit unit) {
         final long ageInNanos = System.nanoTime() - creationMomentEpochNs;
@@ -109,16 +104,14 @@ public class Bin {
     }
 
     /**
-     * If this bin has enough room for the size of the given flow file then it
-     * is added otherwise it is not
+     * If this bin has enough room for the size of the given flow file then it is added otherwise it is not
      *
      * @param flowFile
      * @param session the ProcessSession to which the FlowFile belongs
      * @return true if added; false otherwise
      */
     public boolean offer(final FlowFile flowFile, final ProcessSession session) {
-        if (((size + flowFile.getSize()) > maximumSizeBytes) || (binContents.
-                size() >= maximumEntries)) {
+        if (((size + flowFile.getSize()) > maximumSizeBytes) || (binContents.size() >= maximumEntries)) {
             successiveFailedOfferings++;
             return false;
         }
@@ -144,8 +137,7 @@ public class Bin {
         if (value == null) {
             return null;
         }
-        if (!intPattern.matcher(value).
-                matches()) {
+        if (!intPattern.matcher(value).matches()) {
             return null;
         }
 

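Aside on the Bin logic above: the offer() guard shown in the diff rejects anything that would push the bin past its byte or entry limits and counts the failed attempt, and the Javadoc describes a bin as "full enough" once it is past its minimum size and has seen repeated failed offers. The sketch below restates that rule with plain fields. It is a simplified stand-in, not the NiFi Bin class; the failed-offer threshold and the reset-on-success behavior are assumptions made for the example.

    import java.util.ArrayList;
    import java.util.List;

    public class SimpleBin {
        private final long minimumSizeBytes;
        private final long maximumSizeBytes;
        private final int maximumEntries;
        private final List<Long> entrySizes = new ArrayList<>();
        private long size = 0L;
        private int successiveFailedOfferings = 0;

        SimpleBin(final long minimumSizeBytes, final long maximumSizeBytes, final int maximumEntries) {
            this.minimumSizeBytes = minimumSizeBytes;
            this.maximumSizeBytes = maximumSizeBytes;
            this.maximumEntries = maximumEntries;
        }

        // Mirrors the guard in Bin.offer(): reject anything that would exceed the
        // byte or entry limits, and count the failed attempt.
        boolean offer(final long entrySizeBytes) {
            if (size + entrySizeBytes > maximumSizeBytes || entrySizes.size() >= maximumEntries) {
                successiveFailedOfferings++;
                return false;
            }
            entrySizes.add(entrySizeBytes);
            size += entrySizeBytes;
            successiveFailedOfferings = 0; // assumption: a successful offer resets the failure count
            return true;
        }

        // One plausible reading of the "full enough" Javadoc: past the minimum size
        // and repeatedly unable to accept more (threshold of 3 is invented here).
        boolean isFullEnough() {
            return size >= minimumSizeBytes && successiveFailedOfferings >= 3;
        }

        public static void main(final String[] args) {
            final SimpleBin bin = new SimpleBin(100, 150, 10);
            System.out.println(bin.offer(120));     // true: fits under the 150-byte max
            System.out.println(bin.offer(60));      // false: would exceed the max
            System.out.println(bin.offer(60));      // false
            System.out.println(bin.offer(60));      // false
            System.out.println(bin.isFullEnough()); // true: above minimum, 3 failed offers
        }
    }
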
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java
index eeadfa6..9d0e857 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/BinManager.java
@@ -60,10 +60,8 @@ public class BinManager {
         try {
             for (final List<Bin> binList : groupBinMap.values()) {
                 for (final Bin bin : binList) {
-                    for (final FlowFileSessionWrapper wrapper : bin.
-                            getContents()) {
-                        wrapper.getSession().
-                                rollback();
+                    for (final FlowFileSessionWrapper wrapper : bin.getContents()) {
+                        wrapper.getSession().rollback();
                     }
                 }
             }
@@ -108,15 +106,12 @@ public class BinManager {
     }
 
     /**
-     * Adds the given flowFile to the first available bin in which it fits for
-     * the given group or creates a new bin in the specified group if necessary.
+     * Adds the given flowFile to the first available bin in which it fits for the given group or creates a new bin in the specified group if necessary.
      * <p/>
-     * @param groupIdentifier the group to which the flow file belongs; can be
-     * null
+     * @param groupIdentifier the group to which the flow file belongs; can be null
      * @param flowFile the flow file to bin
      * @param session the ProcessSession to which the FlowFile belongs
-     * @return true if added; false if no bin exists which can fit this item and
-     * no bin can be created based on current min/max criteria
+     * @return true if added; false if no bin exists which can fit this item and no bin can be created based on current min/max criteria
      */
     public boolean offer(final String groupIdentifier, final FlowFile flowFile, final ProcessSession session) {
         final long currentMaxSizeBytes = maxSizeBytes.get();
@@ -128,8 +123,7 @@ public class BinManager {
             final List<Bin> currentBins = groupBinMap.get(groupIdentifier);
             if (currentBins == null) { // this is a new group we need to register
                 final List<Bin> bins = new ArrayList<>();
-                final Bin bin = new Bin(minSizeBytes.get(), currentMaxSizeBytes, minEntries.
-                        get(), maxEntries.get(), fileCountAttribute.get());
+                final Bin bin = new Bin(minSizeBytes.get(), currentMaxSizeBytes, minEntries.get(), maxEntries.get(), fileCountAttribute.get());
                 bins.add(bin);
                 groupBinMap.put(groupIdentifier, bins);
                 binCount++;
@@ -143,8 +137,7 @@ public class BinManager {
                 }
 
                 //if we've reached this point then we couldn't fit it into any existing bins - gotta make a new one
-                final Bin bin = new Bin(minSizeBytes.get(), currentMaxSizeBytes, minEntries.
-                        get(), maxEntries.get(), fileCountAttribute.get());
+                final Bin bin = new Bin(minSizeBytes.get(), currentMaxSizeBytes, minEntries.get(), maxEntries.get(), fileCountAttribute.get());
                 currentBins.add(bin);
                 binCount++;
                 return bin.offer(flowFile, session);
@@ -155,12 +148,10 @@ public class BinManager {
     }
 
     /**
-     * Finds all bins that are considered full and removes them from the
-     * manager.
+     * Finds all bins that are considered full and removes them from the manager.
      * <p/>
-     * @param relaxFullnessConstraint if false will require bins to be full
-     * before considered ready; if true bins only have to meet their minimum
-     * size criteria or be 'old' and then they'll be considered ready
+     * @param relaxFullnessConstraint if false will require bins to be full before considered ready; if true bins only have to meet their minimum size criteria or be 'old' and then they'll be
+     * considered ready
      * @return
      */
     public Collection<Bin> removeReadyBins(boolean relaxFullnessConstraint) {
@@ -169,12 +160,10 @@ public class BinManager {
 
         wLock.lock();
         try {
-            for (final Map.Entry<String, List<Bin>> group : groupBinMap.
-                    entrySet()) {
+            for (final Map.Entry<String, List<Bin>> group : groupBinMap.entrySet()) {
                 final List<Bin> remainingBins = new ArrayList<>();
                 for (final Bin bin : group.getValue()) {
-                    if (relaxFullnessConstraint && (bin.isFullEnough() || bin.
-                            isOlderThan(maxBinAgeSeconds.get(), TimeUnit.SECONDS))) { //relaxed check
+                    if (relaxFullnessConstraint && (bin.isFullEnough() || bin.isOlderThan(maxBinAgeSeconds.get(), TimeUnit.SECONDS))) { //relaxed check
                         readyBins.add(bin);
                     } else if (!relaxFullnessConstraint && bin.isFull()) { //strict check
                         readyBins.add(bin);
@@ -201,8 +190,7 @@ public class BinManager {
             Bin oldestBin = null;
             String oldestBinGroup = null;
 
-            for (final Map.Entry<String, List<Bin>> group : groupBinMap.
-                    entrySet()) {
+            for (final Map.Entry<String, List<Bin>> group : groupBinMap.entrySet()) {
                 for (final Bin bin : group.getValue()) {
                     if (oldestBin == null || bin.isOlderThan(oldestBin)) {
                         oldestBin = bin;
@@ -235,8 +223,7 @@ public class BinManager {
         try {
             for (final List<Bin> bins : groupBinMap.values()) {
                 for (final Bin bin : bins) {
-                    if (bin.
-                            isOlderThan(maxBinAgeSeconds.get(), TimeUnit.SECONDS)) {
+                    if (bin.isOlderThan(maxBinAgeSeconds.get(), TimeUnit.SECONDS)) {
                         return true;
                     }
                 }

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java
index 3131f40..8520813 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/DocumentReaderCallback.java
@@ -36,8 +36,7 @@ public class DocumentReaderCallback implements InputStreamCallback {
     /**
      * Creates a new DocumentReaderCallback .
      *
-     * @param isNamespaceAware Whether or not the parse should consider
-     * namespaces
+     * @param isNamespaceAware Whether or not the parse should consider namespaces
      */
     public DocumentReaderCallback(boolean isNamespaceAware) {
         this.isNamespaceAware = isNamespaceAware;
@@ -52,8 +51,7 @@ public class DocumentReaderCallback implements InputStreamCallback {
     @Override
     public void process(final InputStream stream) throws IOException {
         try {
-            DocumentBuilderFactory factory = DocumentBuilderFactory.
-                    newInstance();
+            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
             factory.setNamespaceAware(isNamespaceAware);
             DocumentBuilder builder = factory.newDocumentBuilder();
             document = builder.parse(stream);

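Aside on DocumentReaderCallback.process above: the parse is the standard JAXP pattern. Stripped of the NiFi callback plumbing, it looks like the runnable sketch below; the sample XML string and class name are invented for illustration.

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import javax.xml.parsers.DocumentBuilder;
    import javax.xml.parsers.DocumentBuilderFactory;

    import org.w3c.dom.Document;

    public class DomParseExample {
        public static void main(final String[] args) throws Exception {
            final String xml = "<flow><processor name=\"UnpackContent\"/></flow>";
            try (InputStream stream = new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8))) {
                final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                factory.setNamespaceAware(true); // the callback makes this configurable
                final DocumentBuilder builder = factory.newDocumentBuilder();
                final Document document = builder.parse(stream);
                System.out.println(document.getDocumentElement().getNodeName()); // prints "flow"
            }
        }
    }
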

[08/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
index 0a076ca..21e6b4c 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
@@ -56,54 +56,54 @@ public class FTPTransfer implements FileTransfer {
     public static final String PROXY_TYPE_HTTP = Proxy.Type.HTTP.name();
     public static final String PROXY_TYPE_SOCKS = Proxy.Type.SOCKS.name();
 
-    public static final PropertyDescriptor CONNECTION_MODE = new PropertyDescriptor.Builder().
-            name("Connection Mode").
-            description("The FTP Connection Mode").
-            allowableValues(CONNECTION_MODE_ACTIVE, CONNECTION_MODE_PASSIVE).
-            defaultValue(CONNECTION_MODE_PASSIVE).
-            build();
-    public static final PropertyDescriptor TRANSFER_MODE = new PropertyDescriptor.Builder().
-            name("Transfer Mode").
-            description("The FTP Transfer Mode").
-            allowableValues(TRANSFER_MODE_BINARY, TRANSFER_MODE_ASCII).
-            defaultValue(TRANSFER_MODE_BINARY).
-            build();
-    public static final PropertyDescriptor PORT = new PropertyDescriptor.Builder().
-            name("Port").
-            description("The port that the remote system is listening on for file transfers").
-            addValidator(StandardValidators.PORT_VALIDATOR).
-            required(true).
-            defaultValue("21").
-            build();
-    public static final PropertyDescriptor PROXY_TYPE = new PropertyDescriptor.Builder().
-            name("Proxy Type").
-            description("Proxy type used for file transfers").
-            allowableValues(PROXY_TYPE_DIRECT, PROXY_TYPE_HTTP, PROXY_TYPE_SOCKS).
-            defaultValue(PROXY_TYPE_DIRECT).
-            build();
-    public static final PropertyDescriptor PROXY_HOST = new PropertyDescriptor.Builder().
-            name("Proxy Host").
-            description("The fully qualified hostname or IP address of the proxy server").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            build();
-    public static final PropertyDescriptor PROXY_PORT = new PropertyDescriptor.Builder().
-            name("Proxy Port").
-            description("The port of the proxy server").
-            addValidator(StandardValidators.PORT_VALIDATOR).
-            build();
-    public static final PropertyDescriptor HTTP_PROXY_USERNAME = new PropertyDescriptor.Builder().
-            name("Http Proxy Username").
-            description("Http Proxy Username").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            required(false).
-            build();
-    public static final PropertyDescriptor HTTP_PROXY_PASSWORD = new PropertyDescriptor.Builder().
-            name("Http Proxy Password").
-            description("Http Proxy Password").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            required(false).
-            sensitive(true).
-            build();
+    public static final PropertyDescriptor CONNECTION_MODE = new PropertyDescriptor.Builder()
+            .name("Connection Mode")
+            .description("The FTP Connection Mode")
+            .allowableValues(CONNECTION_MODE_ACTIVE, CONNECTION_MODE_PASSIVE)
+            .defaultValue(CONNECTION_MODE_PASSIVE)
+            .build();
+    public static final PropertyDescriptor TRANSFER_MODE = new PropertyDescriptor.Builder()
+            .name("Transfer Mode")
+            .description("The FTP Transfer Mode")
+            .allowableValues(TRANSFER_MODE_BINARY, TRANSFER_MODE_ASCII)
+            .defaultValue(TRANSFER_MODE_BINARY)
+            .build();
+    public static final PropertyDescriptor PORT = new PropertyDescriptor.Builder()
+            .name("Port")
+            .description("The port that the remote system is listening on for file transfers")
+            .addValidator(StandardValidators.PORT_VALIDATOR)
+            .required(true)
+            .defaultValue("21")
+            .build();
+    public static final PropertyDescriptor PROXY_TYPE = new PropertyDescriptor.Builder()
+            .name("Proxy Type")
+            .description("Proxy type used for file transfers")
+            .allowableValues(PROXY_TYPE_DIRECT, PROXY_TYPE_HTTP, PROXY_TYPE_SOCKS)
+            .defaultValue(PROXY_TYPE_DIRECT)
+            .build();
+    public static final PropertyDescriptor PROXY_HOST = new PropertyDescriptor.Builder()
+            .name("Proxy Host")
+            .description("The fully qualified hostname or IP address of the proxy server")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor PROXY_PORT = new PropertyDescriptor.Builder()
+            .name("Proxy Port")
+            .description("The port of the proxy server")
+            .addValidator(StandardValidators.PORT_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor HTTP_PROXY_USERNAME = new PropertyDescriptor.Builder()
+            .name("Http Proxy Username")
+            .description("Http Proxy Username")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .required(false)
+            .build();
+    public static final PropertyDescriptor HTTP_PROXY_PASSWORD = new PropertyDescriptor.Builder()
+            .name("Http Proxy Password")
+            .description("Http Proxy Password")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .required(false)
+            .sensitive(true)
+            .build();
 
     private final ProcessorLog logger;
 
@@ -135,8 +135,7 @@ public class FTPTransfer implements FileTransfer {
                 client.disconnect();
             }
         } catch (final Exception ex) {
-            logger.warn("Failed to close FTPClient due to {}", new Object[]{ex.
-                toString()}, ex);
+            logger.warn("Failed to close FTPClient due to {}", new Object[]{ex.toString()}, ex);
         }
         client = null;
     }
@@ -149,13 +148,9 @@ public class FTPTransfer implements FileTransfer {
 
     @Override
     public List<FileInfo> getListing() throws IOException {
-        final String path = ctx.getProperty(FileTransfer.REMOTE_PATH).
-                evaluateAttributeExpressions().
-                getValue();
+        final String path = ctx.getProperty(FileTransfer.REMOTE_PATH).evaluateAttributeExpressions().getValue();
         final int depth = 0;
-        final int maxResults = ctx.
-                getProperty(FileTransfer.REMOTE_POLL_BATCH_SIZE).
-                asInteger();
+        final int maxResults = ctx.getProperty(FileTransfer.REMOTE_POLL_BATCH_SIZE).asInteger();
         return getListing(path, depth, maxResults);
     }
 
@@ -166,43 +161,27 @@ public class FTPTransfer implements FileTransfer {
         }
 
         if (depth >= 100) {
-            logger.
-                    warn(this + " had to stop recursively searching directories at a recursive depth of " + depth + " to avoid memory issues");
+            logger.warn(this + " had to stop recursively searching directories at a recursive depth of " + depth + " to avoid memory issues");
             return listing;
         }
 
-        final boolean ignoreDottedFiles = ctx.
-                getProperty(FileTransfer.IGNORE_DOTTED_FILES).
-                asBoolean();
-        final boolean recurse = ctx.getProperty(FileTransfer.RECURSIVE_SEARCH).
-                asBoolean();
-        final String fileFilterRegex = ctx.
-                getProperty(FileTransfer.FILE_FILTER_REGEX).
-                getValue();
-        final Pattern pattern = (fileFilterRegex == null) ? null : Pattern.
-                compile(fileFilterRegex);
-        final String pathFilterRegex = ctx.
-                getProperty(FileTransfer.PATH_FILTER_REGEX).
-                getValue();
-        final Pattern pathPattern = (!recurse || pathFilterRegex == null) ? null : Pattern.
-                compile(pathFilterRegex);
-        final String remotePath = ctx.getProperty(FileTransfer.REMOTE_PATH).
-                evaluateAttributeExpressions().
-                getValue();
+        final boolean ignoreDottedFiles = ctx.getProperty(FileTransfer.IGNORE_DOTTED_FILES).asBoolean();
+        final boolean recurse = ctx.getProperty(FileTransfer.RECURSIVE_SEARCH).asBoolean();
+        final String fileFilterRegex = ctx.getProperty(FileTransfer.FILE_FILTER_REGEX).getValue();
+        final Pattern pattern = (fileFilterRegex == null) ? null : Pattern.compile(fileFilterRegex);
+        final String pathFilterRegex = ctx.getProperty(FileTransfer.PATH_FILTER_REGEX).getValue();
+        final Pattern pathPattern = (!recurse || pathFilterRegex == null) ? null : Pattern.compile(pathFilterRegex);
+        final String remotePath = ctx.getProperty(FileTransfer.REMOTE_PATH).evaluateAttributeExpressions().getValue();
 
         // check if this directory path matches the PATH_FILTER_REGEX
         boolean pathFilterMatches = true;
         if (pathPattern != null) {
             Path reldir = path == null ? Paths.get(".") : Paths.get(path);
             if (remotePath != null) {
-                reldir = Paths.get(remotePath).
-                        relativize(reldir);
+                reldir = Paths.get(remotePath).relativize(reldir);
             }
-            if (reldir != null && !reldir.toString().
-                    isEmpty()) {
-                if (!pathPattern.matcher(reldir.toString().
-                        replace("\\", "/")).
-                        matches()) {
+            if (reldir != null && !reldir.toString().isEmpty()) {
+                if (!pathPattern.matcher(reldir.toString().replace("\\", "/")).matches()) {
                     pathFilterMatches = false;
                 }
             }
@@ -213,14 +192,12 @@ public class FTPTransfer implements FileTransfer {
         int count = 0;
         final FTPFile[] files;
 
-        if (path == null || path.trim().
-                isEmpty()) {
+        if (path == null || path.trim().isEmpty()) {
             files = client.listFiles(".");
         } else {
             files = client.listFiles(path);
         }
-        if (files.length == 0 && path != null && !path.trim().
-                isEmpty()) {
+        if (files.length == 0 && path != null && !path.trim().isEmpty()) {
             // throw exception if directory doesn't exist
             final boolean cdSuccessful = setWorkingDirectory(path);
             if (!cdSuccessful) {
@@ -239,24 +216,20 @@ public class FTPTransfer implements FileTransfer {
             }
 
             final File newFullPath = new File(path, filename);
-            final String newFullForwardPath = newFullPath.getPath().
-                    replace("\\", "/");
+            final String newFullForwardPath = newFullPath.getPath().replace("\\", "/");
 
             if (recurse && file.isDirectory()) {
                 try {
-                    listing.
-                            addAll(getListing(newFullForwardPath, depth + 1, maxResults - count));
+                    listing.addAll(getListing(newFullForwardPath, depth + 1, maxResults - count));
                 } catch (final IOException e) {
-                    logger.
-                            error("Unable to get listing from " + newFullForwardPath + "; skipping this subdirectory");
+                    logger.error("Unable to get listing from " + newFullForwardPath + "; skipping this subdirectory");
                 }
             }
 
             // if is not a directory and is not a link and it matches
             // FILE_FILTER_REGEX - then let's add it
             if (!file.isDirectory() && !file.isSymbolicLink() && pathFilterMatches) {
-                if (pattern == null || pattern.matcher(filename).
-                        matches()) {
+                if (pattern == null || pattern.matcher(filename).matches()) {
                     listing.add(newFileInfo(file, path));
                     count++;
                 }
@@ -275,38 +248,27 @@ public class FTPTransfer implements FileTransfer {
             return null;
         }
         final File newFullPath = new File(path, file.getName());
-        final String newFullForwardPath = newFullPath.getPath().
-                replace("\\", "/");
+        final String newFullForwardPath = newFullPath.getPath().replace("\\", "/");
         StringBuilder perms = new StringBuilder();
-        perms.append(file.
-                hasPermission(FTPFile.USER_ACCESS, FTPFile.READ_PERMISSION) ? "r" : "-");
-        perms.append(file.
-                hasPermission(FTPFile.USER_ACCESS, FTPFile.WRITE_PERMISSION) ? "w" : "-");
-        perms.append(file.
-                hasPermission(FTPFile.USER_ACCESS, FTPFile.EXECUTE_PERMISSION) ? "x" : "-");
-        perms.append(file.
-                hasPermission(FTPFile.GROUP_ACCESS, FTPFile.READ_PERMISSION) ? "r" : "-");
-        perms.append(file.
-                hasPermission(FTPFile.GROUP_ACCESS, FTPFile.WRITE_PERMISSION) ? "w" : "-");
-        perms.append(file.
-                hasPermission(FTPFile.GROUP_ACCESS, FTPFile.EXECUTE_PERMISSION) ? "x" : "-");
-        perms.append(file.
-                hasPermission(FTPFile.WORLD_ACCESS, FTPFile.READ_PERMISSION) ? "r" : "-");
-        perms.append(file.
-                hasPermission(FTPFile.WORLD_ACCESS, FTPFile.WRITE_PERMISSION) ? "w" : "-");
-        perms.append(file.
-                hasPermission(FTPFile.WORLD_ACCESS, FTPFile.EXECUTE_PERMISSION) ? "x" : "-");
+        perms.append(file.hasPermission(FTPFile.USER_ACCESS, FTPFile.READ_PERMISSION) ? "r" : "-");
+        perms.append(file.hasPermission(FTPFile.USER_ACCESS, FTPFile.WRITE_PERMISSION) ? "w" : "-");
+        perms.append(file.hasPermission(FTPFile.USER_ACCESS, FTPFile.EXECUTE_PERMISSION) ? "x" : "-");
+        perms.append(file.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.READ_PERMISSION) ? "r" : "-");
+        perms.append(file.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.WRITE_PERMISSION) ? "w" : "-");
+        perms.append(file.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.EXECUTE_PERMISSION) ? "x" : "-");
+        perms.append(file.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.READ_PERMISSION) ? "r" : "-");
+        perms.append(file.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.WRITE_PERMISSION) ? "w" : "-");
+        perms.append(file.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.EXECUTE_PERMISSION) ? "x" : "-");
 
         FileInfo.Builder builder = new FileInfo.Builder()
-                .filename(file.getName()).
-                fullPathFileName(newFullForwardPath).
-                directory(file.isDirectory()).
-                size(file.getSize()).
-                lastModifiedTime(file.getTimestamp().
-                        getTimeInMillis()).
-                permissions(perms.toString()).
-                owner(file.getUser()).
-                group(file.getGroup());
+                .filename(file.getName())
+                .fullPathFileName(newFullForwardPath)
+                .directory(file.isDirectory())
+                .size(file.getSize())
+                .lastModifiedTime(file.getTimestamp().getTimeInMillis())
+                .permissions(perms.toString())
+                .owner(file.getUser())
+                .group(file.getGroup());
         return builder.build();
     }
 
@@ -359,20 +321,16 @@ public class FTPTransfer implements FileTransfer {
 
     @Override
     public void ensureDirectoryExists(final FlowFile flowFile, final File directoryName) throws IOException {
-        if (directoryName.getParent() != null && !directoryName.getParentFile().
-                equals(new File(File.separator))) {
+        if (directoryName.getParent() != null && !directoryName.getParentFile().equals(new File(File.separator))) {
             ensureDirectoryExists(flowFile, directoryName.getParentFile());
         }
 
-        final String remoteDirectory = directoryName.getAbsolutePath().
-                replace("\\", "/").
-                replaceAll("^.\\:", "");
+        final String remoteDirectory = directoryName.getAbsolutePath().replace("\\", "/").replaceAll("^.\\:", "");
         final FTPClient client = getClient(flowFile);
         final boolean cdSuccessful = setWorkingDirectory(remoteDirectory);
 
         if (!cdSuccessful) {
-            logger.
-                    debug("Remote Directory {} does not exist; creating it", new Object[]{remoteDirectory});
+            logger.debug("Remote Directory {} does not exist; creating it", new Object[]{remoteDirectory});
             if (client.makeDirectory(remoteDirectory)) {
                 logger.debug("Created {}", new Object[]{remoteDirectory});
             } else {
@@ -410,26 +368,19 @@ public class FTPTransfer implements FileTransfer {
             fullPath = workingDir.endsWith("/") ? workingDir + filename : workingDir + "/" + filename;
         }
 
-        String tempFilename = ctx.getProperty(TEMP_FILENAME).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
+        String tempFilename = ctx.getProperty(TEMP_FILENAME).evaluateAttributeExpressions(flowFile).getValue();
         if (tempFilename == null) {
-            final boolean dotRename = ctx.getProperty(DOT_RENAME).
-                    asBoolean();
+            final boolean dotRename = ctx.getProperty(DOT_RENAME).asBoolean();
             tempFilename = dotRename ? "." + filename : filename;
         }
 
         final boolean storeSuccessful = client.storeFile(tempFilename, content);
         if (!storeSuccessful) {
-            throw new IOException("Failed to store file " + tempFilename + " to " + fullPath + " due to: " + client.
-                    getReplyString());
+            throw new IOException("Failed to store file " + tempFilename + " to " + fullPath + " due to: " + client.getReplyString());
         }
 
-        final String lastModifiedTime = ctx.getProperty(LAST_MODIFIED_TIME).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
-        if (lastModifiedTime != null && !lastModifiedTime.trim().
-                isEmpty()) {
+        final String lastModifiedTime = ctx.getProperty(LAST_MODIFIED_TIME).evaluateAttributeExpressions(flowFile).getValue();
+        if (lastModifiedTime != null && !lastModifiedTime.trim().isEmpty()) {
             try {
                 final DateFormat informat = new SimpleDateFormat(FILE_MODIFY_DATE_ATTR_FORMAT, Locale.US);
                 final Date fileModifyTime = informat.parse(lastModifiedTime);
@@ -437,43 +388,32 @@ public class FTPTransfer implements FileTransfer {
                 final String time = outformat.format(fileModifyTime);
                 if (!client.setModificationTime(tempFilename, time)) {
                     // FTP server probably doesn't support MFMT command
-                    logger.
-                            warn("Could not set lastModifiedTime on {} to {}", new Object[]{flowFile, lastModifiedTime});
+                    logger.warn("Could not set lastModifiedTime on {} to {}", new Object[]{flowFile, lastModifiedTime});
                 }
             } catch (final Exception e) {
-                logger.
-                        error("Failed to set lastModifiedTime on {} to {} due to {}", new Object[]{flowFile, lastModifiedTime, e});
+                logger.error("Failed to set lastModifiedTime on {} to {} due to {}", new Object[]{flowFile, lastModifiedTime, e});
             }
         }
-        final String permissions = ctx.getProperty(PERMISSIONS).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
-        if (permissions != null && !permissions.trim().
-                isEmpty()) {
+        final String permissions = ctx.getProperty(PERMISSIONS).evaluateAttributeExpressions(flowFile).getValue();
+        if (permissions != null && !permissions.trim().isEmpty()) {
             try {
                 int perms = numberPermissions(permissions);
                 if (perms >= 0) {
-                    if (!client.sendSiteCommand("chmod " + Integer.
-                            toOctalString(perms) + " " + tempFilename)) {
-                        logger.
-                                warn("Could not set permission on {} to {}", new Object[]{flowFile, permissions});
+                    if (!client.sendSiteCommand("chmod " + Integer.toOctalString(perms) + " " + tempFilename)) {
+                        logger.warn("Could not set permission on {} to {}", new Object[]{flowFile, permissions});
                     }
                 }
             } catch (final Exception e) {
-                logger.
-                        error("Failed to set permission on {} to {} due to {}", new Object[]{flowFile, permissions, e});
+                logger.error("Failed to set permission on {} to {} due to {}", new Object[]{flowFile, permissions, e});
             }
         }
 
         if (!filename.equals(tempFilename)) {
             try {
-                logger.
-                        debug("Renaming remote path from {} to {} for {}", new Object[]{tempFilename, filename, flowFile});
-                final boolean renameSuccessful = client.
-                        rename(tempFilename, filename);
+                logger.debug("Renaming remote path from {} to {} for {}", new Object[]{tempFilename, filename, flowFile});
+                final boolean renameSuccessful = client.rename(tempFilename, filename);
                 if (!renameSuccessful) {
-                    throw new IOException("Failed to rename temporary file " + tempFilename + " to " + fullPath + " due to: " + client.
-                            getReplyString());
+                    throw new IOException("Failed to rename temporary file " + tempFilename + " to " + fullPath + " due to: " + client.getReplyString());
                 }
             } catch (final IOException e) {
                 try {
@@ -495,8 +435,7 @@ public class FTPTransfer implements FileTransfer {
             setWorkingDirectory(path);
         }
         if (!client.deleteFile(remoteFileName)) {
-            throw new IOException("Failed to remove file " + remoteFileName + " due to " + client.
-                    getReplyString());
+            throw new IOException("Failed to remove file " + remoteFileName + " due to " + client.getReplyString());
         }
     }
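
The put logic in the hunks above writes to a temporary name (an explicit Temporary Filename, or a dot-prefixed name when Dot Rename is enabled) and only renames to the real filename once storeFile succeeds, so consumers never see a half-written file. A stripped-down sketch of that pattern with a plain Commons Net FTPClient (connection setup omitted, names illustrative):

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.commons.net.ftp.FTPClient;

    public class DotRenameUpload {

        // Store under a hidden temp name, then expose it via rename once the transfer completes.
        static void upload(final FTPClient client, final String filename, final InputStream content) throws IOException {
            final String tempFilename = "." + filename;
            if (!client.storeFile(tempFilename, content)) {
                throw new IOException("Failed to store " + tempFilename + ": " + client.getReplyString());
            }
            if (!client.rename(tempFilename, filename)) {
                client.deleteFile(tempFilename); // best-effort cleanup of the partial upload
                throw new IOException("Failed to rename " + tempFilename + " to " + filename + ": " + client.getReplyString());
            }
        }
    }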
 
@@ -505,8 +444,7 @@ public class FTPTransfer implements FileTransfer {
         final FTPClient client = getClient(null);
         final boolean success = client.removeDirectory(remoteDirectoryName);
         if (!success) {
-            throw new IOException("Failed to remove directory " + remoteDirectoryName + " due to " + client.
-                    getReplyString());
+            throw new IOException("Failed to remove directory " + remoteDirectoryName + " due to " + client.getReplyString());
         }
     }
 
@@ -525,14 +463,10 @@ public class FTPTransfer implements FileTransfer {
             if (!cmd.isEmpty()) {
                 int result;
                 result = client.sendCommand(cmd);
-                logger.
-                        debug(this + " sent command to the FTP server: " + cmd + " for " + flowFile);
-
-                if (FTPReply.isNegativePermanent(result) || FTPReply.
-                        isNegativeTransient(result)) {
-                    throw new IOException(this + " negative reply back from FTP server cmd: "
-                            + cmd + " reply:" + result + ": " + client.
-                            getReplyString() + " for " + flowFile);
+                logger.debug(this + " sent command to the FTP server: " + cmd + " for " + flowFile);
+
+                if (FTPReply.isNegativePermanent(result) || FTPReply.isNegativeTransient(result)) {
+                    throw new IOException(this + " negative reply back from FTP server cmd: " + cmd + " reply:" + result + ": " + client.getReplyString() + " for " + flowFile);
                 }
             }
         }
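
As reformatted above, each pre/post command is sent with sendCommand and the numeric reply is rejected if it is negative-permanent or negative-transient. The same check in isolation, assuming an already-connected Commons Net client:

    import java.io.IOException;
    import org.apache.commons.net.ftp.FTPClient;
    import org.apache.commons.net.ftp.FTPReply;

    public class SendCommands {

        // Sends each raw FTP command and fails fast on a negative reply code.
        static void sendCommands(final FTPClient client, final Iterable<String> commands) throws IOException {
            for (final String cmd : commands) {
                if (cmd.trim().isEmpty()) {
                    continue;
                }
                final int result = client.sendCommand(cmd.trim());
                if (FTPReply.isNegativePermanent(result) || FTPReply.isNegativeTransient(result)) {
                    throw new IOException("Negative reply for '" + cmd + "': " + result + " " + client.getReplyString());
                }
            }
        }
    }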
@@ -540,9 +474,7 @@ public class FTPTransfer implements FileTransfer {
 
     private FTPClient getClient(final FlowFile flowFile) throws IOException {
         if (client != null) {
-            String desthost = ctx.getProperty(HOSTNAME).
-                    evaluateAttributeExpressions(flowFile).
-                    getValue();
+            String desthost = ctx.getProperty(HOSTNAME).evaluateAttributeExpressions(flowFile).getValue();
             if (remoteHostName.equals(desthost)) {
                 // destination matches so we can keep our current session
                 resetWorkingDirectory();
@@ -553,38 +485,24 @@ public class FTPTransfer implements FileTransfer {
             }
         }
 
-        final Proxy.Type proxyType = Proxy.Type.valueOf(ctx.
-                getProperty(PROXY_TYPE).
-                getValue());
-        final String proxyHost = ctx.getProperty(PROXY_HOST).
-                getValue();
-        final Integer proxyPort = ctx.getProperty(PROXY_PORT).
-                asInteger();
+        final Proxy.Type proxyType = Proxy.Type.valueOf(ctx.getProperty(PROXY_TYPE).getValue());
+        final String proxyHost = ctx.getProperty(PROXY_HOST).getValue();
+        final Integer proxyPort = ctx.getProperty(PROXY_PORT).asInteger();
         FTPClient client;
         if (proxyType == Proxy.Type.HTTP) {
-            client = new FTPHTTPClient(proxyHost, proxyPort, ctx.
-                    getProperty(HTTP_PROXY_USERNAME).
-                    getValue(), ctx.getProperty(HTTP_PROXY_PASSWORD).
-                    getValue());
+            client = new FTPHTTPClient(proxyHost, proxyPort, ctx.getProperty(HTTP_PROXY_USERNAME).getValue(), ctx.getProperty(HTTP_PROXY_PASSWORD).getValue());
         } else {
             client = new FTPClient();
             if (proxyType == Proxy.Type.SOCKS) {
-                client.
-                        setSocketFactory(new SocksProxySocketFactory(new Proxy(proxyType, new InetSocketAddress(proxyHost, proxyPort))));
+                client.setSocketFactory(new SocksProxySocketFactory(new Proxy(proxyType, new InetSocketAddress(proxyHost, proxyPort))));
             }
         }
         this.client = client;
-        client.setDataTimeout(ctx.getProperty(DATA_TIMEOUT).
-                asTimePeriod(TimeUnit.MILLISECONDS).
-                intValue());
-        client.setDefaultTimeout(ctx.getProperty(CONNECTION_TIMEOUT).
-                asTimePeriod(TimeUnit.MILLISECONDS).
-                intValue());
+        client.setDataTimeout(ctx.getProperty(DATA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
+        client.setDefaultTimeout(ctx.getProperty(CONNECTION_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
         client.setRemoteVerificationEnabled(false);
 
-        final String remoteHostname = ctx.getProperty(HOSTNAME).
-                evaluateAttributeExpressions(flowFile).
-                getValue();
+        final String remoteHostname = ctx.getProperty(HOSTNAME).evaluateAttributeExpressions(flowFile).getValue();
         this.remoteHostName = remoteHostname;
         InetAddress inetAddress = null;
         try {
@@ -596,35 +514,26 @@ public class FTPTransfer implements FileTransfer {
             inetAddress = InetAddress.getByName(remoteHostname);
         }
 
-        client.connect(inetAddress, ctx.getProperty(PORT).
-                asInteger());
+        client.connect(inetAddress, ctx.getProperty(PORT).asInteger());
         this.closed = false;
-        client.setDataTimeout(ctx.getProperty(DATA_TIMEOUT).
-                asTimePeriod(TimeUnit.MILLISECONDS).
-                intValue());
-        client.setSoTimeout(ctx.getProperty(CONNECTION_TIMEOUT).
-                asTimePeriod(TimeUnit.MILLISECONDS).
-                intValue());
-
-        final String username = ctx.getProperty(USERNAME).
-                getValue();
-        final String password = ctx.getProperty(PASSWORD).
-                getValue();
+        client.setDataTimeout(ctx.getProperty(DATA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
+        client.setSoTimeout(ctx.getProperty(CONNECTION_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
+
+        final String username = ctx.getProperty(USERNAME).getValue();
+        final String password = ctx.getProperty(PASSWORD).getValue();
         final boolean loggedIn = client.login(username, password);
         if (!loggedIn) {
             throw new IOException("Could not login for user '" + username + "'");
         }
 
-        final String connectionMode = ctx.getProperty(CONNECTION_MODE).
-                getValue();
+        final String connectionMode = ctx.getProperty(CONNECTION_MODE).getValue();
         if (connectionMode.equalsIgnoreCase(CONNECTION_MODE_ACTIVE)) {
             client.enterLocalActiveMode();
         } else {
             client.enterLocalPassiveMode();
         }
 
-        final String transferMode = ctx.getProperty(TRANSFER_MODE).
-                getValue();
+        final String transferMode = ctx.getProperty(TRANSFER_MODE).getValue();
         final int fileType = (transferMode.equalsIgnoreCase(TRANSFER_MODE_ASCII)) ? FTPClient.ASCII_FILE_TYPE : FTPClient.BINARY_FILE_TYPE;
         if (!client.setFileType(fileType)) {
             throw new IOException("Unable to set transfer mode to type " + transferMode);
@@ -638,8 +547,7 @@ public class FTPTransfer implements FileTransfer {
         int number = -1;
         final Pattern rwxPattern = Pattern.compile("^[rwx-]{9}$");
         final Pattern numPattern = Pattern.compile("\\d+");
-        if (rwxPattern.matcher(perms).
-                matches()) {
+        if (rwxPattern.matcher(perms).matches()) {
             number = 0;
             if (perms.charAt(0) == 'r') {
                 number |= 0x100;
@@ -668,8 +576,7 @@ public class FTPTransfer implements FileTransfer {
             if (perms.charAt(8) == 'x') {
                 number |= 0x1;
             }
-        } else if (numPattern.matcher(perms).
-                matches()) {
+        } else if (numPattern.matcher(perms).matches()) {
             try {
                 number = Integer.parseInt(perms, 8);
             } catch (NumberFormatException ignore) {
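
numberPermissions, shown at the end of this file, accepts either a 9-character rwx string or an octal number. A compact sketch of the same conversion (not the committed implementation; it treats any non-dash as a set bit rather than checking r/w/x per position):

    public class PermissionParser {

        // "rw-r--r--" or "644" -> permission bitmask; -1 if unparseable.
        static int numberPermissions(final String perms) {
            if (perms.matches("[rwx-]{9}")) {
                int number = 0;
                for (int i = 0; i < 9; i++) {
                    number <<= 1;
                    if (perms.charAt(i) != '-') {
                        number |= 1;
                    }
                }
                return number;
            }
            if (perms.matches("\\d+")) {
                try {
                    return Integer.parseInt(perms, 8);
                } catch (final NumberFormatException ignore) {
                    return -1;
                }
            }
            return -1;
        }

        public static void main(final String[] args) {
            System.out.println(Integer.toOctalString(numberPermissions("rw-r--r--"))); // 644
            System.out.println(Integer.toOctalString(numberPermissions("644")));       // 644
        }
    }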

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java
index fa7722b..0e6a26f 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPUtils.java
@@ -57,47 +57,30 @@ public class FTPUtils {
     public static final String NETWORK_SOCKET_TIMEOUT_KEY = "network.socket.timeout";
 
     /**
-     * Creates a new FTPClient connected to an FTP server. The following
-     * properties must exist:
+     * Creates a new FTPClient connected to an FTP server. The following properties must exist:
      * <ul>Required Properties:
-     * <li>remote.host - The hostname or IP address of the FTP server to connect
-     * to</li>
+     * <li>remote.host - The hostname or IP address of the FTP server to connect to</li>
      * <li>remote.user - The username of the account to authenticate with</li>
-     * <li>remote.password = The password for the username to authenticate
-     * with</li>
+     * <li>remote.password - The password for the username to authenticate with</li>
      * </ul>
      * <ul>Optional Properties:
-     * <li>remote.port - The port on the FTP server to connect to. Defaults to
-     * FTP default.</li>
-     * <li>transfer.mode - The type of transfer for this connection ('ascii',
-     * 'binary'). Defaults to 'binary'</li>
-     * <li>connection.mode - The type of FTP connection to make ('active_local',
-     * 'passive_local'). Defaults to 'active_local'. In active_local the server
-     * initiates 'data connections' to the client where in passive_local the
-     * client initiates 'data connections' to the server.</li>
-     * <li>network.data.timeout - Default is 0. Sets the timeout in milliseconds
-     * for waiting to establish a new 'data connection' (not a control
-     * connection) when in ACTIVE_LOCAL mode. Also, this establishes the amount
-     * of time to wait on read calls on the data connection in either mode. A
-     * value of zero means do not timeout. Users should probably set a value
-     * here unless using very reliable communications links or else risk
-     * indefinite hangs that require a restart.</li>
-     * <li>network.socket.timeout - Default is 0. Sets the timeout in
-     * milliseconds to use when creating a new control channel socket and also a
-     * timeout to set when reading from a control socket. A value of zero means
-     * do not timeout. Users should probably set a value here unless using very
-     * reliable communications links or else risk indefinite hangs that require
-     * a restart.</li>
+     * <li>remote.port - The port on the FTP server to connect to. Defaults to FTP default.</li>
+     * <li>transfer.mode - The type of transfer for this connection ('ascii', 'binary'). Defaults to 'binary'</li>
+     * <li>connection.mode - The type of FTP connection to make ('active_local', 'passive_local'). Defaults to 'active_local'. In active_local the server initiates 'data connections' to the client
+     * where in passive_local the client initiates 'data connections' to the server.</li>
+     * <li>network.data.timeout - Default is 0. Sets the timeout in milliseconds for waiting to establish a new 'data connection' (not a control connection) when in ACTIVE_LOCAL mode. Also, this
+     * establishes the amount of time to wait on read calls on the data connection in either mode. A value of zero means do not timeout. Users should probably set a value here unless using very
+     * reliable communications links or else risk indefinite hangs that require a restart.</li>
+     * <li>network.socket.timeout - Default is 0. Sets the timeout in milliseconds to use when creating a new control channel socket and also a timeout to set when reading from a control socket. A
+     * value of zero means do not timeout. Users should probably set a value here unless using very reliable communications links or else risk indefinite hangs that require a restart.</li>
      * </ul>
      *
      * @param conf
-     * @param monitor if provided will be used to monitor FTP commands processed
-     * but may be null
+     * @param monitor if provided will be used to monitor FTP commands processed but may be null
      * @return FTPClient connected to FTP server as configured
      * @throws NullPointerException if either argument is null
      * @throws IllegalArgumentException if a required property is missing
-     * @throws NumberFormatException if any argument that must be an int cannot
-     * be converted to int
+     * @throws NumberFormatException if any argument that must be an int cannot be converted to int
      * @throws IOException if some problem occurs connecting to FTP server
      */
     public static FTPClient connect(final FTPConfiguration conf, final ProtocolCommandListener monitor) throws IOException {
@@ -112,11 +95,9 @@ public class FTPUtils {
         final String transferModeVal = conf.transferMode;
         final String transferMode = (null == transferModeVal) ? BINARY_TRANSFER_MODE : transferModeVal;
         final String networkDataTimeoutVal = conf.dataTimeout;
-        final int networkDataTimeout = (null == networkDataTimeoutVal) ? 0 : Integer.
-                parseInt(networkDataTimeoutVal);
+        final int networkDataTimeout = (null == networkDataTimeoutVal) ? 0 : Integer.parseInt(networkDataTimeoutVal);
         final String networkSocketTimeoutVal = conf.connectionTimeout;
-        final int networkSocketTimeout = (null == networkSocketTimeoutVal) ? 0 : Integer.
-                parseInt(networkSocketTimeoutVal);
+        final int networkSocketTimeout = (null == networkSocketTimeoutVal) ? 0 : Integer.parseInt(networkSocketTimeoutVal);
 
         final FTPClient client = new FTPClient();
         if (networkDataTimeout > 0) {
@@ -182,60 +163,47 @@ public class FTPUtils {
         @Override
         public void protocolCommandSent(final ProtocolCommandEvent event) {
             if (logger.isDebugEnabled()) {
-                logger.debug(processor + " : " + event.getMessage().
-                        trim());
+                logger.debug(processor + " : " + event.getMessage().trim());
             }
         }
 
         @Override
         public void protocolReplyReceived(final ProtocolCommandEvent event) {
             if (logger.isDebugEnabled()) {
-                logger.debug(processor + " : " + event.getMessage().
-                        trim());
+                logger.debug(processor + " : " + event.getMessage().trim());
             }
         }
 
     }
 
     /**
-     * Handles the logic required to change to the given directory RELATIVE TO
-     * THE CURRENT DIRECTORY which can include creating new directories needed.
+     * Handles the logic required to change to the given directory RELATIVE TO THE CURRENT DIRECTORY which can include creating new directories needed.
      *
-     * This will first attempt to change to the full path of the given directory
-     * outright. If that fails, then it will attempt to change from the top of
-     * the tree of the given directory all the way down to the final leaf node
-     * of the given directory.
+     * This will first attempt to change to the full path of the given directory outright. If that fails, then it will attempt to change from the top of the tree of the given directory all the way
+     * down to the final leaf node of the given directory.
      *
      * @param client - the ftp client with an already active connection
      * @param dirPath - the path to change or create directories to
-     * @param createDirs - if true will attempt to create any missing
-     * directories
+     * @param createDirs - if true will attempt to create any missing directories
     * @param processor - used solely for targeting logging output.
      * @throws IOException if any access problem occurs
      */
     public static void changeWorkingDirectory(final FTPClient client, final String dirPath, final boolean createDirs, final Processor processor) throws IOException {
         final String currentWorkingDirectory = client.printWorkingDirectory();
         final File dir = new File(dirPath);
-        logger.
-                debug(processor + " attempting to change directory from " + currentWorkingDirectory + " to " + dir.
-                        getPath());
+        logger.debug(processor + " attempting to change directory from " + currentWorkingDirectory + " to " + dir.getPath());
         boolean dirExists = false;
-        final String forwardPaths = dir.getPath().
-                replaceAll(Matcher.quoteReplacement("\\"), Matcher.
-                        quoteReplacement("/"));
+        final String forwardPaths = dir.getPath().replaceAll(Matcher.quoteReplacement("\\"), Matcher.quoteReplacement("/"));
         //always use forward paths for long string attempt
         try {
             dirExists = client.changeWorkingDirectory(forwardPaths);
             if (dirExists) {
-                logger.
-                        debug(processor + " changed working directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "'");
+                logger.debug(processor + " changed working directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "'");
             } else {
-                logger.
-                        debug(processor + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way.");
+                logger.debug(processor + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way.");
             }
         } catch (final IOException ioe) {
-            logger.
-                    debug(processor + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way.");
+            logger.debug(processor + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way.");
         }
         if (!dirExists) {  //couldn't navigate directly...begin hard work
             final Deque<String> stack = new LinkedList<>();
@@ -256,15 +224,12 @@ public class FTPUtils {
                     exists = false;
                 }
                 if (!exists && createDirs) {
-                    logger.
-                            debug(processor + " creating new directory and changing to it " + dirName);
+                    logger.debug(processor + " creating new directory and changing to it " + dirName);
                     client.makeDirectory(dirName);
-                    if (!(client.makeDirectory(dirName) || client.
-                            changeWorkingDirectory(dirName))) {
+                    if (!(client.makeDirectory(dirName) || client.changeWorkingDirectory(dirName))) {
                         throw new IOException(processor + " could not create and change to newly created directory " + dirName);
                     } else {
-                        logger.
-                                debug(processor + " successfully changed working directory to " + dirName);
+                        logger.debug(processor + " successfully changed working directory to " + dirName);
                     }
                 } else if (!exists) {
                     throw new IOException(processor + " could not change directory to '" + dirName + "' from '" + currentWorkingDirectory + "'");
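
changeWorkingDirectory above first tries the full forward-slash path and, only if that fails, walks the path one element at a time, optionally creating missing directories. A self-contained sketch of that fallback (simplified from the committed code, no logging):

    import java.io.File;
    import java.io.IOException;
    import java.util.ArrayDeque;
    import java.util.Deque;
    import org.apache.commons.net.ftp.FTPClient;

    public class ChangeDirectoryFallback {

        static void changeWorkingDirectory(final FTPClient client, final String dirPath, final boolean createDirs) throws IOException {
            final String forwardPath = new File(dirPath).getPath().replace('\\', '/');
            if (client.changeWorkingDirectory(forwardPath)) {
                return; // the direct attempt worked
            }
            // Could not navigate directly, so descend one path element at a time.
            final Deque<String> stack = new ArrayDeque<>();
            for (File dir = new File(dirPath); dir != null; dir = dir.getParentFile()) {
                stack.push(dir.getName());
            }
            while (!stack.isEmpty()) {
                final String dirName = stack.pop();
                if (dirName.isEmpty()) {
                    continue; // root element of an absolute path
                }
                if (!client.changeWorkingDirectory(dirName)) {
                    if (createDirs && client.makeDirectory(dirName) && client.changeWorkingDirectory(dirName)) {
                        continue;
                    }
                    throw new IOException("Could not change to or create directory " + dirName);
                }
            }
        }
    }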

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileInfo.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileInfo.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileInfo.java
index 6605ff6..c57b4e0 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileInfo.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileInfo.java
@@ -67,8 +67,7 @@ public class FileInfo implements Comparable<FileInfo>, Serializable {
     public int hashCode() {
         final int prime = 31;
         int result = 1;
-        result = prime * result + ((fullPathFileName == null) ? 0 : fullPathFileName.
-                hashCode());
+        result = prime * result + ((fullPathFileName == null) ? 0 : fullPathFileName.hashCode());
         return result;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java
index 3af00fa..ece0e59 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FileTransfer.java
@@ -50,125 +50,125 @@ public interface FileTransfer extends Closeable {
 
     void ensureDirectoryExists(FlowFile flowFile, File remoteDirectory) throws IOException;
 
-    public static final PropertyDescriptor HOSTNAME = new PropertyDescriptor.Builder().
-            name("Hostname").
-            description("The fully qualified hostname or IP address of the remote system").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            required(true).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor USERNAME = new PropertyDescriptor.Builder().
-            name("Username").
-            description("Username").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            required(true).
-            build();
-    public static final PropertyDescriptor PASSWORD = new PropertyDescriptor.Builder().
-            name("Password").
-            description("Password for the user account").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            required(false).
-            sensitive(true).
-            build();
-    public static final PropertyDescriptor DATA_TIMEOUT = new PropertyDescriptor.Builder().
-            name("Data Timeout").
-            description("Amount of time to wait before timing out while transferring data").
-            required(true).
-            addValidator(StandardValidators.TIME_PERIOD_VALIDATOR).
-            defaultValue("30 sec").
-            build();
-    public static final PropertyDescriptor CONNECTION_TIMEOUT = new PropertyDescriptor.Builder().
-            name("Connection Timeout").
-            description("Amount of time to wait before timing out while creating a connection").
-            required(true).
-            addValidator(StandardValidators.TIME_PERIOD_VALIDATOR).
-            defaultValue("30 sec").
-            build();
-    public static final PropertyDescriptor REMOTE_PATH = new PropertyDescriptor.Builder().
-            name("Remote Path").
-            description("The path on the remote system from which to pull or push files").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor CREATE_DIRECTORY = new PropertyDescriptor.Builder().
-            name("Create Directory").
-            description("Specifies whether or not the remote directory should be created if it does not exist.").
-            required(true).
-            allowableValues("true", "false").
-            defaultValue("false").
-            build();
-
-    public static final PropertyDescriptor USE_COMPRESSION = new PropertyDescriptor.Builder().
-            name("Use Compression").
-            description("Indicates whether or not ZLIB compression should be used when transferring files").
-            allowableValues("true", "false").
-            defaultValue("false").
-            required(true).
-            build();
+    public static final PropertyDescriptor HOSTNAME = new PropertyDescriptor.Builder()
+            .name("Hostname")
+            .description("The fully qualified hostname or IP address of the remote system")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .required(true)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor USERNAME = new PropertyDescriptor.Builder()
+            .name("Username")
+            .description("Username")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .required(true)
+            .build();
+    public static final PropertyDescriptor PASSWORD = new PropertyDescriptor.Builder()
+            .name("Password")
+            .description("Password for the user account")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .required(false)
+            .sensitive(true)
+            .build();
+    public static final PropertyDescriptor DATA_TIMEOUT = new PropertyDescriptor.Builder()
+            .name("Data Timeout")
+            .description("Amount of time to wait before timing out while transferring data")
+            .required(true)
+            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+            .defaultValue("30 sec")
+            .build();
+    public static final PropertyDescriptor CONNECTION_TIMEOUT = new PropertyDescriptor.Builder()
+            .name("Connection Timeout")
+            .description("Amount of time to wait before timing out while creating a connection")
+            .required(true)
+            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+            .defaultValue("30 sec")
+            .build();
+    public static final PropertyDescriptor REMOTE_PATH = new PropertyDescriptor.Builder()
+            .name("Remote Path")
+            .description("The path on the remote system from which to pull or push files")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor CREATE_DIRECTORY = new PropertyDescriptor.Builder()
+            .name("Create Directory")
+            .description("Specifies whether or not the remote directory should be created if it does not exist.")
+            .required(true)
+            .allowableValues("true", "false")
+            .defaultValue("false")
+            .build();
+
+    public static final PropertyDescriptor USE_COMPRESSION = new PropertyDescriptor.Builder()
+            .name("Use Compression")
+            .description("Indicates whether or not ZLIB compression should be used when transferring files")
+            .allowableValues("true", "false")
+            .defaultValue("false")
+            .required(true)
+            .build();
 
     // GET-specific properties
-    public static final PropertyDescriptor RECURSIVE_SEARCH = new PropertyDescriptor.Builder().
-            name("Search Recursively").
-            description("If true, will pull files from arbitrarily nested subdirectories; otherwise, will not traverse subdirectories").
-            required(true).
-            defaultValue("false").
-            allowableValues("true", "false").
-            build();
-    public static final PropertyDescriptor FILE_FILTER_REGEX = new PropertyDescriptor.Builder().
-            name("File Filter Regex").
-            description("Provides a Java Regular Expression for filtering Filenames; if a filter is supplied, only files whose names match that Regular Expression will be fetched").
-            required(false).
-            addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR).
-            build();
-    public static final PropertyDescriptor PATH_FILTER_REGEX = new PropertyDescriptor.Builder().
-            name("Path Filter Regex").
-            description("When " + RECURSIVE_SEARCH.getName() + " is true, then only subdirectories whose path matches the given Regular Expression will be scanned").
-            required(false).
-            addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR).
-            build();
-    public static final PropertyDescriptor MAX_SELECTS = new PropertyDescriptor.Builder().
-            name("Max Selects").
-            description("The maximum number of files to pull in a single connection").
-            defaultValue("100").
-            required(true).
-            addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR).
-            build();
-    public static final PropertyDescriptor REMOTE_POLL_BATCH_SIZE = new PropertyDescriptor.Builder().
-            name("Remote Poll Batch Size").
-            description("The value specifies how many file paths to find in a given directory on the remote system when doing a file listing. This value in general should not need to be modified but when polling against a remote system with a tremendous number of files this value can be critical.  Setting this value too high can result very poor performance and setting it too low can cause the flow to be slower than normal.").
-            defaultValue("5000").
-            addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR).
-            required(true).
-            build();
-    public static final PropertyDescriptor DELETE_ORIGINAL = new PropertyDescriptor.Builder().
-            name("Delete Original").
-            description("Determines whether or not the file is deleted from the remote system after it has been successfully transferred").
-            defaultValue("true").
-            allowableValues("true", "false").
-            required(true).
-            build();
-    public static final PropertyDescriptor POLLING_INTERVAL = new PropertyDescriptor.Builder().
-            name("Polling Interval").
-            description("Determines how long to wait between fetching the listing for new files").
-            addValidator(StandardValidators.TIME_PERIOD_VALIDATOR).
-            required(true).
-            defaultValue("60 sec").
-            build();
-    public static final PropertyDescriptor IGNORE_DOTTED_FILES = new PropertyDescriptor.Builder().
-            name("Ignore Dotted Files").
-            description("If true, files whose names begin with a dot (\".\") will be ignored").
-            allowableValues("true", "false").
-            defaultValue("true").
-            required(true).
-            build();
-    public static final PropertyDescriptor USE_NATURAL_ORDERING = new PropertyDescriptor.Builder().
-            name("Use Natural Ordering").
-            description("If true, will pull files in the order in which they are naturally listed; otherwise, the order in which the files will be pulled is not defined").
-            allowableValues("true", "false").
-            defaultValue("false").
-            required(true).
-            build();
+    public static final PropertyDescriptor RECURSIVE_SEARCH = new PropertyDescriptor.Builder()
+            .name("Search Recursively")
+            .description("If true, will pull files from arbitrarily nested subdirectories; otherwise, will not traverse subdirectories")
+            .required(true)
+            .defaultValue("false")
+            .allowableValues("true", "false")
+            .build();
+    public static final PropertyDescriptor FILE_FILTER_REGEX = new PropertyDescriptor.Builder()
+            .name("File Filter Regex")
+            .description("Provides a Java Regular Expression for filtering Filenames; if a filter is supplied, only files whose names match that Regular Expression will be fetched")
+            .required(false)
+            .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor PATH_FILTER_REGEX = new PropertyDescriptor.Builder()
+            .name("Path Filter Regex")
+            .description("When " + RECURSIVE_SEARCH.getName() + " is true, then only subdirectories whose path matches the given Regular Expression will be scanned")
+            .required(false)
+            .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor MAX_SELECTS = new PropertyDescriptor.Builder()
+            .name("Max Selects")
+            .description("The maximum number of files to pull in a single connection")
+            .defaultValue("100")
+            .required(true)
+            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
+            .build();
+    public static final PropertyDescriptor REMOTE_POLL_BATCH_SIZE = new PropertyDescriptor.Builder()
+            .name("Remote Poll Batch Size")
+            .description("The value specifies how many file paths to find in a given directory on the remote system when doing a file listing. This value in general should not need to be modified but when polling against a remote system with a tremendous number of files this value can be critical.  Setting this value too high can result in very poor performance and setting it too low can cause the flow to be slower than normal.")
+            .defaultValue("5000")
+            .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
+            .required(true)
+            .build();
+    public static final PropertyDescriptor DELETE_ORIGINAL = new PropertyDescriptor.Builder()
+            .name("Delete Original")
+            .description("Determines whether or not the file is deleted from the remote system after it has been successfully transferred")
+            .defaultValue("true")
+            .allowableValues("true", "false")
+            .required(true)
+            .build();
+    public static final PropertyDescriptor POLLING_INTERVAL = new PropertyDescriptor.Builder()
+            .name("Polling Interval")
+            .description("Determines how long to wait between fetching the listing for new files")
+            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
+            .required(true)
+            .defaultValue("60 sec")
+            .build();
+    public static final PropertyDescriptor IGNORE_DOTTED_FILES = new PropertyDescriptor.Builder()
+            .name("Ignore Dotted Files")
+            .description("If true, files whose names begin with a dot (\".\") will be ignored")
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .required(true)
+            .build();
+    public static final PropertyDescriptor USE_NATURAL_ORDERING = new PropertyDescriptor.Builder()
+            .name("Use Natural Ordering")
+            .description("If true, will pull files in the order in which they are naturally listed; otherwise, the order in which the files will be pulled is not defined")
+            .allowableValues("true", "false")
+            .defaultValue("false")
+            .required(true)
+            .build();
 
     // PUT-specific properties
     public static final String FILE_MODIFY_DATE_ATTR_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
@@ -179,66 +179,66 @@ public interface FileTransfer extends Closeable {
     public static final String CONFLICT_RESOLUTION_REJECT = "REJECT";
     public static final String CONFLICT_RESOLUTION_FAIL = "FAIL";
     public static final String CONFLICT_RESOLUTION_NONE = "NONE";
-    public static final PropertyDescriptor CONFLICT_RESOLUTION = new PropertyDescriptor.Builder().
-            name("Conflict Resolution").
-            description("Determines how to handle the problem of filename collisions").
-            required(true).
-            allowableValues(CONFLICT_RESOLUTION_REPLACE, CONFLICT_RESOLUTION_IGNORE, CONFLICT_RESOLUTION_RENAME, CONFLICT_RESOLUTION_REJECT, CONFLICT_RESOLUTION_FAIL, CONFLICT_RESOLUTION_NONE).
-            defaultValue(CONFLICT_RESOLUTION_NONE).
-            build();
-    public static final PropertyDescriptor REJECT_ZERO_BYTE = new PropertyDescriptor.Builder().
-            name("Reject Zero-Byte Files").
-            description("Determines whether or not Zero-byte files should be rejected without attempting to transfer").
-            allowableValues("true", "false").
-            defaultValue("true").
-            build();
-    public static final PropertyDescriptor DOT_RENAME = new PropertyDescriptor.Builder().
-            name("Dot Rename").
-            description("If true, then the filename of the sent file is prepended with a \".\" and then renamed back to the original once the file is completely sent. Otherwise, there is no rename. This property is ignored if the Temporary Filename property is set.").
-            allowableValues("true", "false").
-            defaultValue("true").
-            build();
-    public static final PropertyDescriptor TEMP_FILENAME = new PropertyDescriptor.Builder().
-            name("Temporary Filename").
-            description("If set, the filename of the sent file will be equal to the value specified during the transfer and after successful completion will be renamed to the original filename. If this value is set, the Dot Rename property is ignored.").
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            required(false).
-            build();
-    public static final PropertyDescriptor LAST_MODIFIED_TIME = new PropertyDescriptor.Builder().
-            name("Last Modified Time").
-            description("The lastModifiedTime to assign to the file after transferring it. If not set, the lastModifiedTime will not be changed. Format must be yyyy-MM-dd'T'HH:mm:ssZ. You may also use expression language such as ${file.lastModifiedTime}. If the value is invalid, the processor will not be invalid but will fail to change lastModifiedTime of the file.").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor PERMISSIONS = new PropertyDescriptor.Builder().
-            name("Permissions").
-            description("The permissions to assign to the file after transferring it. Format must be either UNIX rwxrwxrwx with a - in place of denied permissions (e.g. rw-r--r--) or an octal number (e.g. 644). If not set, the permissions will not be changed. You may also use expression language such as ${file.permissions}. If the value is invalid, the processor will not be invalid but will fail to change permissions of the file.").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor REMOTE_OWNER = new PropertyDescriptor.Builder().
-            name("Remote Owner").
-            description("Integer value representing the User ID to set on the file after transferring it. If not set, the owner will not be set. You may also use expression language such as ${file.owner}. If the value is invalid, the processor will not be invalid but will fail to change the owner of the file.").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor REMOTE_GROUP = new PropertyDescriptor.Builder().
-            name("Remote Group").
-            description("Integer value representing the Group ID to set on the file after transferring it. If not set, the group will not be set. You may also use expression language such as ${file.group}. If the value is invalid, the processor will not be invalid but will fail to change the group of the file.").
-            required(false).
-            addValidator(StandardValidators.NON_EMPTY_VALIDATOR).
-            expressionLanguageSupported(true).
-            build();
-    public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder().
-            name("Batch Size").
-            description("The maximum number of FlowFiles to send in a single connection").
-            required(true).
-            addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR).
-            defaultValue("500").
-            build();
+    public static final PropertyDescriptor CONFLICT_RESOLUTION = new PropertyDescriptor.Builder()
+            .name("Conflict Resolution")
+            .description("Determines how to handle the problem of filename collisions")
+            .required(true)
+            .allowableValues(CONFLICT_RESOLUTION_REPLACE, CONFLICT_RESOLUTION_IGNORE, CONFLICT_RESOLUTION_RENAME, CONFLICT_RESOLUTION_REJECT, CONFLICT_RESOLUTION_FAIL, CONFLICT_RESOLUTION_NONE)
+            .defaultValue(CONFLICT_RESOLUTION_NONE)
+            .build();
+    public static final PropertyDescriptor REJECT_ZERO_BYTE = new PropertyDescriptor.Builder()
+            .name("Reject Zero-Byte Files")
+            .description("Determines whether or not Zero-byte files should be rejected without attempting to transfer")
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .build();
+    public static final PropertyDescriptor DOT_RENAME = new PropertyDescriptor.Builder()
+            .name("Dot Rename")
+            .description("If true, then the filename of the sent file is prepended with a \".\" and then renamed back to the original once the file is completely sent. Otherwise, there is no rename. This property is ignored if the Temporary Filename property is set.")
+            .allowableValues("true", "false")
+            .defaultValue("true")
+            .build();
+    public static final PropertyDescriptor TEMP_FILENAME = new PropertyDescriptor.Builder()
+            .name("Temporary Filename")
+            .description("If set, the filename of the sent file will be equal to the value specified during the transfer and after successful completion will be renamed to the original filename. If this value is set, the Dot Rename property is ignored.")
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .required(false)
+            .build();
+    public static final PropertyDescriptor LAST_MODIFIED_TIME = new PropertyDescriptor.Builder()
+            .name("Last Modified Time")
+            .description("The lastModifiedTime to assign to the file after transferring it. If not set, the lastModifiedTime will not be changed. Format must be yyyy-MM-dd'T'HH:mm:ssZ. You may also use expression language such as ${file.lastModifiedTime}. If the value is invalid, the processor will not be invalid but will fail to change lastModifiedTime of the file.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor PERMISSIONS = new PropertyDescriptor.Builder()
+            .name("Permissions")
+            .description("The permissions to assign to the file after transferring it. Format must be either UNIX rwxrwxrwx with a - in place of denied permissions (e.g. rw-r--r--) or an octal number (e.g. 644). If not set, the permissions will not be changed. You may also use expression language such as ${file.permissions}. If the value is invalid, the processor will not be invalid but will fail to change permissions of the file.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor REMOTE_OWNER = new PropertyDescriptor.Builder()
+            .name("Remote Owner")
+            .description("Integer value representing the User ID to set on the file after transferring it. If not set, the owner will not be set. You may also use expression language such as ${file.owner}. If the value is invalid, the processor will not be invalid but will fail to change the owner of the file.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor REMOTE_GROUP = new PropertyDescriptor.Builder()
+            .name("Remote Group")
+            .description("Integer value representing the Group ID to set on the file after transferring it. If not set, the group will not be set. You may also use expression language such as ${file.group}. If the value is invalid, the processor will not be invalid but will fail to change the group of the file.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .build();
+    public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder()
+            .name("Batch Size")
+            .description("The maximum number of FlowFiles to send in a single connection")
+            .required(true)
+            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
+            .defaultValue("500")
+            .build();
 
 }
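
These descriptors are consumed with the usual NiFi pattern visible earlier in this diff: context.getProperty(...) followed by evaluateAttributeExpressions for the expression-language-enabled ones, then getValue/asInteger/asTimePeriod. An illustrative fragment (the surrounding class is hypothetical; only the FileTransfer constants come from the interface above):

    import java.util.concurrent.TimeUnit;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processors.standard.util.FileTransfer;

    public class TransferSettings {

        static void readSettings(final ProcessContext context, final FlowFile flowFile) {
            // HOSTNAME supports expression language, so it is evaluated against the FlowFile's attributes.
            final String hostname = context.getProperty(FileTransfer.HOSTNAME).evaluateAttributeExpressions(flowFile).getValue();
            final String username = context.getProperty(FileTransfer.USERNAME).getValue();
            final long dataTimeoutMillis = context.getProperty(FileTransfer.DATA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS);
            final int batchSize = context.getProperty(FileTransfer.BATCH_SIZE).asInteger();
            System.out.println(hostname + " / " + username + " / " + dataTimeoutMillis + " ms / batch " + batchSize);
        }
    }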

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/d29a2d68/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsFactory.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsFactory.java b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsFactory.java
index 0f50cdf..35a65dc 100644
--- a/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsFactory.java
+++ b/nifi/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/JmsFactory.java
@@ -104,13 +104,9 @@ public class JmsFactory {
 
         final ConnectionFactory connectionFactory = createConnectionFactory(context);
 
-        final String username = context.getProperty(USERNAME).
-                getValue();
-        final String password = context.getProperty(PASSWORD).
-                getValue();
-        final Connection connection = (username == null && password == null) ? connectionFactory.
-                createConnection()
-                : connectionFactory.createConnection(username, password);
+        final String username = context.getProperty(USERNAME).getValue();
+        final String password = context.getProperty(PASSWORD).getValue();
+        final Connection connection = (username == null && password == null) ? connectionFactory.createConnection() : connectionFactory.createConnection(username, password);
 
         connection.setClientID(clientId);
         connection.start();
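
The reformatted line keeps the original conditional: an anonymous connection when neither username nor password is configured, an authenticated one otherwise. In isolation, against the plain javax.jms API:

    import javax.jms.Connection;
    import javax.jms.ConnectionFactory;
    import javax.jms.JMSException;

    public class JmsConnections {

        // Anonymous when no credentials are set, authenticated otherwise.
        static Connection createConnection(final ConnectionFactory factory, final String username, final String password) throws JMSException {
            return (username == null && password == null)
                    ? factory.createConnection()
                    : factory.createConnection(username, password);
        }
    }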
@@ -119,17 +115,12 @@ public class JmsFactory {
 
     public static Connection createConnection(final String url, final String jmsProvider, final String username, final String password, final int timeoutMillis) throws JMSException {
         final ConnectionFactory connectionFactory = createConnectionFactory(url, timeoutMillis, jmsProvider);
-        return (username == null && password == null) ? connectionFactory.
-                createConnection() : connectionFactory.
-                createConnection(username, password);
+        return (username == null && password == null) ? connectionFactory.createConnection() : connectionFactory.createConnection(username, password);
     }
 
     public static String createClientId(final ProcessContext context) {
-        final String clientIdPrefix = context.getProperty(CLIENT_ID_PREFIX).
-                getValue();
-        return CLIENT_ID_FIXED_PREFIX + (clientIdPrefix == null ? "" : clientIdPrefix) + "-" + UUID.
-                randomUUID().
-                toString();
+        final String clientIdPrefix = context.getProperty(CLIENT_ID_PREFIX).getValue();
+        return CLIENT_ID_FIXED_PREFIX + (clientIdPrefix == null ? "" : clientIdPrefix) + "-" + UUID.randomUUID().toString();
     }
 
     public static boolean clientIdPrefixEquals(final String one, final String two) {
@@ -138,14 +129,11 @@ public class JmsFactory {
         } else if (two == null) {
             return false;
         }
-        int uuidLen = UUID.randomUUID().
-                toString().
-                length();
+        int uuidLen = UUID.randomUUID().toString().length();
         if (one.length() <= uuidLen || two.length() <= uuidLen) {
             return false;
         }
-        return one.substring(0, one.length() - uuidLen).
-                equals(two.substring(0, two.length() - uuidLen));
+        return one.substring(0, one.length() - uuidLen).equals(two.substring(0, two.length() - uuidLen));
     }
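
createClientId appends a random UUID to a fixed prefix, and clientIdPrefixEquals compares two IDs after stripping a UUID-length suffix. A standalone sketch of the pair (the FIXED_PREFIX constant here is an assumption standing in for the real CLIENT_ID_FIXED_PREFIX):

    import java.util.UUID;

    public class ClientIds {

        private static final String FIXED_PREFIX = "client-"; // placeholder for CLIENT_ID_FIXED_PREFIX

        static String createClientId(final String prefix) {
            return FIXED_PREFIX + (prefix == null ? "" : prefix) + "-" + UUID.randomUUID().toString();
        }

        // Two IDs match when everything before the trailing UUID is identical.
        static boolean prefixEquals(final String one, final String two) {
            if (one == null) {
                return two == null;
            } else if (two == null) {
                return false;
            }
            final int uuidLen = UUID.randomUUID().toString().length();
            if (one.length() <= uuidLen || two.length() <= uuidLen) {
                return false;
            }
            return one.substring(0, one.length() - uuidLen).equals(two.substring(0, two.length() - uuidLen));
        }

        public static void main(final String[] args) {
            final String a = createClientId("ingest");
            final String b = createClientId("ingest");
            System.out.println(prefixEquals(a, b)); // true: same prefix, different UUIDs
        }
    }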
 
     public static byte[] createByteArray(final Message message) throws JMSException {
@@ -164,8 +152,7 @@ public class JmsFactory {
     }
 
     private static byte[] getMessageBytes(TextMessage message) throws JMSException {
-        return (message.getText() == null) ? new byte[0] : message.getText().
-                getBytes();
+        return (message.getText() == null) ? new byte[0] : message.getText().getBytes();
     }
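
getMessageBytes is overloaded per JMS message type; the TextMessage overload above simply falls back to an empty array for a null body. For comparison, a BytesMessage variant using only standard javax.jms calls (not the committed code; it assumes the body fits in an int-sized array):

    import javax.jms.BytesMessage;
    import javax.jms.JMSException;

    public class BytesMessageBodies {

        // Copies the full body of a BytesMessage into a byte array.
        static byte[] getMessageBytes(final BytesMessage message) throws JMSException {
            final byte[] data = new byte[(int) message.getBodyLength()];
            message.readBytes(data);
            return data;
        }
    }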
 
     private static byte[] getMessageBytes(BytesMessage message) throws JMSException {
@@ -204,8 +191,7 @@ public class JmsFactory {
             String key = (String) elements.nextElement();
             map.put(key, message.getString(key));
         }
-        return map.toString().
-                getBytes();
+        return map.toString().getBytes();
     }
 
     private static byte[] getMessageBytes(ObjectMessage message) throws JMSException {
@@ -224,9 +210,7 @@ public class JmsFactory {
     }
 
     public static Session createSession(final ProcessContext context, final Connection connection, final boolean transacted) throws JMSException {
-        final String configuredAckMode = context.
-                getProperty(ACKNOWLEDGEMENT_MODE).
-                getValue();
+        final String configuredAckMode = context.getProperty(ACKNOWLEDGEMENT_MODE).getValue();
         return createSession(connection, configuredAckMode, transacted);
     }
 
@@ -247,14 +231,11 @@ public class JmsFactory {
         Session jmsSession = null;
         try {
             connection = JmsFactory.createConnection(context);
-            jmsSession = JmsFactory.
-                    createSession(context, connection, DEFAULT_IS_TRANSACTED);
+            jmsSession = JmsFactory.createSession(context, connection, DEFAULT_IS_TRANSACTED);
 
-            final String messageSelector = context.getProperty(MESSAGE_SELECTOR).
-                    getValue();
+            final String messageSelector = context.getProperty(MESSAGE_SELECTOR).getValue();
             final Destination destination = createQueue(context);
-            final MessageConsumer messageConsumer = jmsSession.
-                    createConsumer(destination, messageSelector, false);
+            final MessageConsumer messageConsumer = jmsSession.createConsumer(destination, messageSelector, false);
 
             return new WrappedMessageConsumer(connection, jmsSession, messageConsumer);
         } catch (JMSException e) {
@@ -280,20 +261,15 @@ public class JmsFactory {
         Session jmsSession = null;
         try {
             connection = JmsFactory.createConnection(context, clientId);
-            jmsSession = JmsFactory.
-                    createSession(context, connection, DEFAULT_IS_TRANSACTED);
+            jmsSession = JmsFactory.createSession(context, connection, DEFAULT_IS_TRANSACTED);
 
-            final String messageSelector = context.getProperty(MESSAGE_SELECTOR).
-                    getValue();
+            final String messageSelector = context.getProperty(MESSAGE_SELECTOR).getValue();
             final Topic topic = createTopic(context);
             final MessageConsumer messageConsumer;
-            if (context.getProperty(DURABLE_SUBSCRIPTION).
-                    asBoolean()) {
-                messageConsumer = jmsSession.
-                        createDurableSubscriber(topic, clientId, messageSelector, false);
+            if (context.getProperty(DURABLE_SUBSCRIPTION).asBoolean()) {
+                messageConsumer = jmsSession.createDurableSubscriber(topic, clientId, messageSelector, false);
             } else {
-                messageConsumer = jmsSession.
-                        createConsumer(topic, messageSelector, false);
+                messageConsumer = jmsSession.createConsumer(topic, messageSelector, false);
             }
 
             return new WrappedMessageConsumer(connection, jmsSession, messageConsumer);
@@ -309,8 +285,7 @@ public class JmsFactory {
     }
 
     private static Destination getDestination(final ProcessContext context) throws JMSException {
-        final String destinationType = context.getProperty(DESTINATION_TYPE).
-                getValue();
+        final String destinationType = context.getProperty(DESTINATION_TYPE).getValue();
         switch (destinationType) {
             case DESTINATION_TYPE_TOPIC:
                 return createTopic(context);
@@ -330,12 +305,10 @@ public class JmsFactory {
 
         try {
             connection = JmsFactory.createConnection(context);
-            jmsSession = JmsFactory.
-                    createSession(context, connection, transacted);
+            jmsSession = JmsFactory.createSession(context, connection, transacted);
 
             final Destination destination = getDestination(context);
-            final MessageProducer messageProducer = jmsSession.
-                    createProducer(destination);
+            final MessageProducer messageProducer = jmsSession.createProducer(destination);
 
             return new WrappedMessageProducer(connection, jmsSession, messageProducer);
         } catch (JMSException e) {
@@ -350,13 +323,11 @@ public class JmsFactory {
     }
 
     public static Destination createQueue(final ProcessContext context) {
-        return createQueue(context, context.getProperty(DESTINATION_NAME).
-                getValue());
+        return createQueue(context, context.getProperty(DESTINATION_NAME).getValue());
     }
 
     public static Queue createQueue(final ProcessContext context, final String queueName) {
-        return createQueue(context.getProperty(JMS_PROVIDER).
-                getValue(), queueName);
+        return createQueue(context.getProperty(JMS_PROVIDER).getValue(), queueName);
     }
 
     public static Queue createQueue(final String jmsProvider, final String queueName) {
@@ -368,10 +339,8 @@ public class JmsFactory {
     }
 
     private static Topic createTopic(final ProcessContext context) {
-        final String topicName = context.getProperty(DESTINATION_NAME).
-                getValue();
-        switch (context.getProperty(JMS_PROVIDER).
-                getValue()) {
+        final String topicName = context.getProperty(DESTINATION_NAME).getValue();
+        switch (context.getProperty(JMS_PROVIDER).getValue()) {
             case ACTIVEMQ_PROVIDER:
             default:
                 return new ActiveMQTopic(topicName);
@@ -379,13 +348,9 @@ public class JmsFactory {
     }
 
     private static ConnectionFactory createConnectionFactory(final ProcessContext context) throws JMSException {
-        final String url = context.getProperty(URL).
-                getValue();
-        final int timeoutMillis = context.getProperty(TIMEOUT).
-                asTimePeriod(TimeUnit.MILLISECONDS).
-                intValue();
-        final String provider = context.getProperty(JMS_PROVIDER).
-                getValue();
+        final String url = context.getProperty(URL).getValue();
+        final int timeoutMillis = context.getProperty(TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue();
+        final String provider = context.getProperty(JMS_PROVIDER).getValue();
         return createConnectionFactory(url, timeoutMillis, provider);
     }
 
@@ -412,8 +377,7 @@ public class JmsFactory {
 
             if (value == null) {
                 attributes.put(ATTRIBUTE_PREFIX + propName, "");
-                attributes.
-                        put(ATTRIBUTE_PREFIX + propName + ATTRIBUTE_TYPE_SUFFIX, "Unknown");
+                attributes.put(ATTRIBUTE_PREFIX + propName + ATTRIBUTE_TYPE_SUFFIX, "Unknown");
                 continue;
             }
 
@@ -441,42 +405,30 @@ public class JmsFactory {
                 propType = PROP_TYPE_OBJECT;
             }
 
-            attributes.
-                    put(ATTRIBUTE_PREFIX + propName + ATTRIBUTE_TYPE_SUFFIX, propType);
+            attributes.put(ATTRIBUTE_PREFIX + propName + ATTRIBUTE_TYPE_SUFFIX, propType);
         }
 
         if (message.getJMSCorrelationID() != null) {
-            attributes.put(ATTRIBUTE_PREFIX + JMS_CORRELATION_ID, message.
-                    getJMSCorrelationID());
+            attributes.put(ATTRIBUTE_PREFIX + JMS_CORRELATION_ID, message.getJMSCorrelationID());
         }
         if (message.getJMSDestination() != null) {
-            attributes.put(ATTRIBUTE_PREFIX + JMS_DESTINATION, message.
-                    getJMSDestination().
-                    toString());
+            attributes.put(ATTRIBUTE_PREFIX + JMS_DESTINATION, message.getJMSDestination().toString());
         }
         if (message.getJMSMessageID() != null) {
-            attributes.put(ATTRIBUTE_PREFIX + JMS_MESSAGE_ID, message.
-                    getJMSMessageID());
+            attributes.put(ATTRIBUTE_PREFIX + JMS_MESSAGE_ID, message.getJMSMessageID());
         }
         if (message.getJMSReplyTo() != null) {
-            attributes.put(ATTRIBUTE_PREFIX + JMS_REPLY_TO, message.
-                    getJMSReplyTo().
-                    toString());
+            attributes.put(ATTRIBUTE_PREFIX + JMS_REPLY_TO, message.getJMSReplyTo().toString());
         }
         if (message.getJMSType() != null) {
             attributes.put(ATTRIBUTE_PREFIX + JMS_TYPE, message.getJMSType());
         }
 
-        attributes.put(ATTRIBUTE_PREFIX + JMS_DELIVERY_MODE, String.
-                valueOf(message.getJMSDeliveryMode()));
-        attributes.put(ATTRIBUTE_PREFIX + JMS_EXPIRATION, String.
-                valueOf(message.getJMSExpiration()));
-        attributes.put(ATTRIBUTE_PREFIX + JMS_PRIORITY, String.valueOf(message.
-                getJMSPriority()));
-        attributes.put(ATTRIBUTE_PREFIX + JMS_REDELIVERED, String.
-                valueOf(message.getJMSRedelivered()));
-        attributes.put(ATTRIBUTE_PREFIX + JMS_TIMESTAMP, String.valueOf(message.
-                getJMSTimestamp()));
+        attributes.put(ATTRIBUTE_PREFIX + JMS_DELIVERY_MODE, String.valueOf(message.getJMSDeliveryMode()));
+        attributes.put(ATTRIBUTE_PREFIX + JMS_EXPIRATION, String.valueOf(message.getJMSExpiration()));
+        attributes.put(ATTRIBUTE_PREFIX + JMS_PRIORITY, String.valueOf(message.getJMSPriority()));
+        attributes.put(ATTRIBUTE_PREFIX + JMS_REDELIVERED, String.valueOf(message.getJMSRedelivered()));
+        attributes.put(ATTRIBUTE_PREFIX + JMS_TIMESTAMP, String.valueOf(message.getJMSTimestamp()));
         return attributes;
     }
 }
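
For context, a minimal standalone sketch of the optional-credentials pattern that the reformatted JmsFactory.createConnection(...) lines above rely on. This is not code from the commit: the class name, broker URL parameter, and the use of ActiveMQConnectionFactory are assumptions for illustration only.

    import javax.jms.Connection;
    import javax.jms.ConnectionFactory;
    import javax.jms.JMSException;

    import org.apache.activemq.ActiveMQConnectionFactory;

    public class JmsConnectionSketch {
        // Mirrors the ternary used in JmsFactory: fall back to the anonymous
        // createConnection() only when neither credential is supplied.
        public static Connection connect(final String url, final String username, final String password) throws JMSException {
            final ConnectionFactory factory = new ActiveMQConnectionFactory(url);
            return (username == null && password == null)
                    ? factory.createConnection()
                    : factory.createConnection(username, password);
        }
    }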


[35/50] [abbrv] incubator-nifi git commit: NIFI-271

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardControllerServiceDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardControllerServiceDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardControllerServiceDAO.java
index dd9fc0d..486e6d7 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardControllerServiceDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardControllerServiceDAO.java
@@ -37,12 +37,6 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
 
     private ControllerServiceProvider serviceProvider;
 
-    /**
-     * Locates the specified controller service.
-     *
-     * @param controllerServiceId
-     * @return
-     */
     private ControllerServiceNode locateControllerService(final String controllerServiceId) {
         // get the controller service
         final ControllerServiceNode controllerService = serviceProvider.getControllerServiceNode(controllerServiceId);
@@ -55,12 +49,6 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
         return controllerService;
     }
 
-    /**
-     * Creates a controller service.
-     *
-     * @param controllerServiceDTO The controller service DTO
-     * @return The controller service
-     */
     @Override
     public ControllerServiceNode createControllerService(final ControllerServiceDTO controllerServiceDTO) {
         // ensure the type is specified
@@ -72,7 +60,7 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
             // create the controller service
             final ControllerServiceNode controllerService = serviceProvider.createControllerService(controllerServiceDTO.getType(), controllerServiceDTO.getId(), true);
 
-            // ensure we can perform the update 
+            // ensure we can perform the update
             verifyUpdate(controllerService, controllerServiceDTO);
 
             // perform the update
@@ -84,50 +72,27 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
         }
     }
 
-    /**
-     * Gets the specified controller service.
-     *
-     * @param controllerServiceId The controller service id
-     * @return The controller service
-     */
     @Override
     public ControllerServiceNode getControllerService(final String controllerServiceId) {
         return locateControllerService(controllerServiceId);
     }
 
-    /**
-     * Determines if the specified controller service exists.
-     *
-     * @param controllerServiceId
-     * @return
-     */
     @Override
     public boolean hasControllerService(final String controllerServiceId) {
         return serviceProvider.getControllerServiceNode(controllerServiceId) != null;
     }
 
-    /**
-     * Gets all of the controller services.
-     *
-     * @return The controller services
-     */
     @Override
     public Set<ControllerServiceNode> getControllerServices() {
         return serviceProvider.getAllControllerServices();
     }
 
-    /**
-     * Updates the specified controller service.
-     *
-     * @param controllerServiceDTO The controller service DTO
-     * @return The controller service
-     */
     @Override
     public ControllerServiceNode updateControllerService(final ControllerServiceDTO controllerServiceDTO) {
         // get the controller service
         final ControllerServiceNode controllerService = locateControllerService(controllerServiceDTO.getId());
 
-        // ensure we can perform the update 
+        // ensure we can perform the update
         verifyUpdate(controllerService, controllerServiceDTO);
 
         // perform the update
@@ -151,7 +116,8 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
     }
 
     @Override
-    public ControllerServiceReference updateControllerServiceReferencingComponents(final String controllerServiceId, final ScheduledState scheduledState, final ControllerServiceState controllerServiceState) {
+    public ControllerServiceReference updateControllerServiceReferencingComponents(
+            final String controllerServiceId, final ScheduledState scheduledState, final ControllerServiceState controllerServiceState) {
         // get the controller service
         final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);
 
@@ -173,14 +139,6 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
         return controllerService.getReferences();
     }
 
-    /**
-     * Validates the specified configuration for the specified controller
-     * service.
-     *
-     * @param controllerService
-     * @param controllerServiceDTO
-     * @return
-     */
     private List<String> validateProposedConfiguration(final ControllerServiceNode controllerService, final ControllerServiceDTO controllerServiceDTO) {
         final List<String> validationErrors = new ArrayList<>();
         return validationErrors;
@@ -217,12 +175,6 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
         }
     }
 
-    /**
-     * Verifies the controller service can be updated.
-     *
-     * @param controllerService
-     * @param controllerServiceDTO
-     */
     private void verifyUpdate(final ControllerServiceNode controllerService, final ControllerServiceDTO controllerServiceDTO) {
         // validate the new controller service state if appropriate
         if (isNotNull(controllerServiceDTO.getState())) {
@@ -269,12 +221,6 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
         }
     }
 
-    /**
-     * Configures the specified controller service.
-     *
-     * @param controllerService
-     * @param controllerServiceDTO
-     */
     private void configureControllerService(final ControllerServiceNode controllerService, final ControllerServiceDTO controllerServiceDTO) {
         final String name = controllerServiceDTO.getName();
         final String annotationData = controllerServiceDTO.getAnnotationData();
@@ -303,11 +249,6 @@ public class StandardControllerServiceDAO extends ComponentDAO implements Contro
         }
     }
 
-    /**
-     * Deletes the specified controller service.
-     *
-     * @param controllerServiceId The controller service id
-     */
     @Override
     public void deleteControllerService(String controllerServiceId) {
         final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardFunnelDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardFunnelDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardFunnelDAO.java
index d54e2e6..2be8e26 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardFunnelDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardFunnelDAO.java
@@ -30,24 +30,10 @@ public class StandardFunnelDAO extends ComponentDAO implements FunnelDAO {
 
     private FlowController flowController;
 
-    /**
-     * Locates the specified funnel.
-     *
-     * @param groupId
-     * @param funnelId
-     * @return
-     */
     private Funnel locateFunnel(String groupId, String funnelId) {
         return locateFunnel(locateProcessGroup(flowController, groupId), funnelId);
     }
 
-    /**
-     * Locates the specified funnel.
-     *
-     * @param group
-     * @param funnelId
-     * @return
-     */
     private Funnel locateFunnel(ProcessGroup group, String funnelId) {
         // get the funnel
         Funnel funnel = group.getFunnel(funnelId);
@@ -60,12 +46,6 @@ public class StandardFunnelDAO extends ComponentDAO implements FunnelDAO {
         return funnel;
     }
 
-    /**
-     * Creates a funnel.
-     *
-     * @param funnelDTO The funnel DTO
-     * @return The funnel
-     */
     @Override
     public Funnel createFunnel(String groupId, FunnelDTO funnelDTO) {
         if (funnelDTO.getParentGroupId() != null && !flowController.areGroupsSame(groupId, funnelDTO.getParentGroupId())) {
@@ -87,23 +67,11 @@ public class StandardFunnelDAO extends ComponentDAO implements FunnelDAO {
         return funnel;
     }
 
-    /**
-     * Gets the specified funnel.
-     *
-     * @param funnelId The funnel id
-     * @return The funnel
-     */
     @Override
     public Funnel getFunnel(String groupId, String funnelId) {
         return locateFunnel(groupId, funnelId);
     }
 
-    /**
-     * Determines if the specified funnel exists.
-     *
-     * @param funnelId
-     * @return
-     */
     @Override
     public boolean hasFunnel(String groupId, String funnelId) {
         ProcessGroup group;
@@ -116,23 +84,12 @@ public class StandardFunnelDAO extends ComponentDAO implements FunnelDAO {
         return group.getFunnel(funnelId) != null;
     }
 
-    /**
-     * Gets all of the funnels.
-     *
-     * @return The funnels
-     */
     @Override
     public Set<Funnel> getFunnels(String groupId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
         return group.getFunnels();
     }
 
-    /**
-     * Updates the specified funnel.
-     *
-     * @param funnelDTO The funnel DTO
-     * @return The funnel
-     */
     @Override
     public Funnel updateFunnel(String groupId, FunnelDTO funnelDTO) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -157,11 +114,6 @@ public class StandardFunnelDAO extends ComponentDAO implements FunnelDAO {
         funnel.verifyCanDelete();
     }
 
-    /**
-     * Deletes the specified funnel.
-     *
-     * @param funnelId The funnel id
-     */
     @Override
     public void deleteFunnel(String groupId, String funnelId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardInputPortDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardInputPortDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardInputPortDAO.java
index 6e79956..fd133a5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardInputPortDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardInputPortDAO.java
@@ -51,12 +51,6 @@ public class StandardInputPortDAO extends ComponentDAO implements PortDAO {
         return port;
     }
 
-    /**
-     * Creates a port.
-     *
-     * @param portDTO The port DTO
-     * @return The port
-     */
     @Override
     public Port createPort(String groupId, PortDTO portDTO) {
         if (isNotNull(portDTO.getParentGroupId()) && !flowController.areGroupsSame(groupId, portDTO.getParentGroupId())) {
@@ -93,23 +87,11 @@ public class StandardInputPortDAO extends ComponentDAO implements PortDAO {
         return port;
     }
 
-    /**
-     * Gets the specified port.
-     *
-     * @param portId The port id
-     * @return The port
-     */
     @Override
     public Port getPort(String groupId, String portId) {
         return locatePort(groupId, portId);
     }
 
-    /**
-     * Determines if the specified port exists.
-     *
-     * @param portId
-     * @return
-     */
     @Override
     public boolean hasPort(String groupId, String portId) {
         ProcessGroup group = flowController.getGroup(groupId);
@@ -121,11 +103,6 @@ public class StandardInputPortDAO extends ComponentDAO implements PortDAO {
         return group.getInputPort(portId) != null;
     }
 
-    /**
-     * Gets all of the ports.
-     *
-     * @return The ports
-     */
     @Override
     public Set<Port> getPorts(String groupId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -187,13 +164,6 @@ public class StandardInputPortDAO extends ComponentDAO implements PortDAO {
         }
     }
 
-    /**
-     * Validates the proposed processor configuration.
-     *
-     * @param processorNode
-     * @param config
-     * @return
-     */
     private List<String> validateProposedConfiguration(PortDTO portDTO) {
         List<String> validationErrors = new ArrayList<>();
 
@@ -207,12 +177,6 @@ public class StandardInputPortDAO extends ComponentDAO implements PortDAO {
         return validationErrors;
     }
 
-    /**
-     * Updates the specified port.
-     *
-     * @param portDTO The port DTO
-     * @return The port
-     */
     @Override
     public Port updatePort(String groupId, PortDTO portDTO) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -290,11 +254,6 @@ public class StandardInputPortDAO extends ComponentDAO implements PortDAO {
         inputPort.verifyCanDelete();
     }
 
-    /**
-     * Deletes the specified port.
-     *
-     * @param portId The port id
-     */
     @Override
     public void deletePort(final String groupId, final String portId) {
         final ProcessGroup group = locateProcessGroup(flowController, groupId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardLabelDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardLabelDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardLabelDAO.java
index ea16ae3..bd774e2 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardLabelDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardLabelDAO.java
@@ -31,24 +31,10 @@ public class StandardLabelDAO extends ComponentDAO implements LabelDAO {
 
     private FlowController flowController;
 
-    /**
-     * Locates the specified label.
-     *
-     * @param groupId
-     * @param labelId
-     * @return
-     */
     private Label locateLabel(String groupId, String labelId) {
         return locateLabel(locateProcessGroup(flowController, groupId), labelId);
     }
 
-    /**
-     * Locates the specified label.
-     *
-     * @param group
-     * @param labelId
-     * @return
-     */
     private Label locateLabel(ProcessGroup group, String labelId) {
         // get the label
         Label label = group.getLabel(labelId);
@@ -61,12 +47,6 @@ public class StandardLabelDAO extends ComponentDAO implements LabelDAO {
         return label;
     }
 
-    /**
-     * Creates a label.
-     *
-     * @param labelDTO The label DTO
-     * @return The label
-     */
     @Override
     public Label createLabel(String groupId, LabelDTO labelDTO) {
         if (labelDTO.getParentGroupId() != null && !flowController.areGroupsSame(groupId, labelDTO.getParentGroupId())) {
@@ -91,23 +71,11 @@ public class StandardLabelDAO extends ComponentDAO implements LabelDAO {
         return label;
     }
 
-    /**
-     * Gets the specified label.
-     *
-     * @param labelId The label id
-     * @return The label
-     */
     @Override
     public Label getLabel(String groupId, String labelId) {
         return locateLabel(groupId, labelId);
     }
 
-    /**
-     * Determines if the specified label exists.
-     *
-     * @param labelId
-     * @return
-     */
     @Override
     public boolean hasLabel(String groupId, String labelId) {
         ProcessGroup group;
@@ -120,23 +88,12 @@ public class StandardLabelDAO extends ComponentDAO implements LabelDAO {
         return group.getLabel(labelId) != null;
     }
 
-    /**
-     * Gets all of the labels.
-     *
-     * @return The labels
-     */
     @Override
     public Set<Label> getLabels(String groupId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
         return group.getLabels();
     }
 
-    /**
-     * Updates the specified label.
-     *
-     * @param labelDTO The label DTO
-     * @return The label
-     */
     @Override
     public Label updateLabel(String groupId, LabelDTO labelDTO) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -161,11 +118,6 @@ public class StandardLabelDAO extends ComponentDAO implements LabelDAO {
         return label;
     }
 
-    /**
-     * Deletes the specified label.
-     *
-     * @param labelId The label id
-     */
     @Override
     public void deleteLabel(String groupId, String labelId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardOutputPortDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardOutputPortDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardOutputPortDAO.java
index 588ea49..a33682b 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardOutputPortDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardOutputPortDAO.java
@@ -51,12 +51,6 @@ public class StandardOutputPortDAO extends ComponentDAO implements PortDAO {
         return port;
     }
 
-    /**
-     * Creates a port.
-     *
-     * @param portDTO The port DTO
-     * @return The port
-     */
     @Override
     public Port createPort(String groupId, PortDTO portDTO) {
         if (isNotNull(portDTO.getParentGroupId()) && !flowController.areGroupsSame(groupId, portDTO.getParentGroupId())) {
@@ -93,23 +87,11 @@ public class StandardOutputPortDAO extends ComponentDAO implements PortDAO {
         return port;
     }
 
-    /**
-     * Gets the specified port.
-     *
-     * @param portId The port id
-     * @return The port
-     */
     @Override
     public Port getPort(String groupId, String portId) {
         return locatePort(groupId, portId);
     }
 
-    /**
-     * Determines if the specified port exists.
-     *
-     * @param portId
-     * @return
-     */
     @Override
     public boolean hasPort(String groupId, String portId) {
         ProcessGroup group = flowController.getGroup(groupId);
@@ -121,11 +103,6 @@ public class StandardOutputPortDAO extends ComponentDAO implements PortDAO {
         return group.getOutputPort(portId) != null;
     }
 
-    /**
-     * Gets all of the ports.
-     *
-     * @return The ports
-     */
     @Override
     public Set<Port> getPorts(String groupId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -187,13 +164,6 @@ public class StandardOutputPortDAO extends ComponentDAO implements PortDAO {
         }
     }
 
-    /**
-     * Validates the proposed processor configuration.
-     *
-     * @param processorNode
-     * @param config
-     * @return
-     */
     private List<String> validateProposedConfiguration(PortDTO portDTO) {
         List<String> validationErrors = new ArrayList<>();
 
@@ -207,12 +177,6 @@ public class StandardOutputPortDAO extends ComponentDAO implements PortDAO {
         return validationErrors;
     }
 
-    /**
-     * Updates the specified port.
-     *
-     * @param portDTO The port DTO
-     * @return The port
-     */
     @Override
     public Port updatePort(String groupId, PortDTO portDTO) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -290,11 +254,6 @@ public class StandardOutputPortDAO extends ComponentDAO implements PortDAO {
         outputPort.verifyCanDelete();
     }
 
-    /**
-     * Deletes the specified port.
-     *
-     * @param portId The port id
-     */
     @Override
     public void deletePort(String groupId, String portId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessGroupDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessGroupDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessGroupDAO.java
index fd6f944..52887e4 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessGroupDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessGroupDAO.java
@@ -28,13 +28,6 @@ public class StandardProcessGroupDAO extends ComponentDAO implements ProcessGrou
 
     private FlowController flowController;
 
-    /**
-     * Creates a process group reference.
-     *
-     * @param parentGroupId The parent group id
-     * @param processGroup The process group
-     * @return The process group
-     */
     @Override
     public ProcessGroup createProcessGroup(String parentGroupId, ProcessGroupDTO processGroup) {
         if (processGroup.getParentGroupId() != null && !flowController.areGroupsSame(processGroup.getParentGroupId(), parentGroupId)) {
@@ -58,32 +51,16 @@ public class StandardProcessGroupDAO extends ComponentDAO implements ProcessGrou
         return group;
     }
 
-    /**
-     * Gets the specified process group.
-     *
-     * @return The process group
-     */
     @Override
     public ProcessGroup getProcessGroup(String groupId) {
         return locateProcessGroup(flowController, groupId);
     }
 
-    /**
-     * Determines if the specified process group exists.
-     *
-     * @return
-     */
     @Override
     public boolean hasProcessGroup(String groupId) {
         return flowController.getGroup(groupId) != null;
     }
 
-    /**
-     * Gets all of the process groups.
-     *
-     * @param parentGroupId The parent group id
-     * @return The process groups
-     */
     @Override
     public Set<ProcessGroup> getProcessGroups(String parentGroupId) {
         ProcessGroup group = locateProcessGroup(flowController, parentGroupId);
@@ -104,12 +81,6 @@ public class StandardProcessGroupDAO extends ComponentDAO implements ProcessGrou
         }
     }
 
-    /**
-     * Updates the specified process group.
-     *
-     * @param processGroupDTO
-     * @return The process group
-     */
     @Override
     public ProcessGroup updateProcessGroup(ProcessGroupDTO processGroupDTO) {
         final ProcessGroup group = locateProcessGroup(flowController, processGroupDTO.getId());
@@ -145,11 +116,6 @@ public class StandardProcessGroupDAO extends ComponentDAO implements ProcessGrou
         group.verifyCanDelete();
     }
 
-    /**
-     * Deletes the specified process group.
-     *
-     * @param processGroupId The process group id
-     */
     @Override
     public void deleteProcessGroup(String processGroupId) {
         // get the group

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessorDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessorDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessorDAO.java
index be33d5a..e52a476 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessorDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardProcessorDAO.java
@@ -70,12 +70,6 @@ public class StandardProcessorDAO extends ComponentDAO implements ProcessorDAO {
         return processor;
     }
 
-    /**
-     * Determines if the specified processor is loaded.
-     *
-     * @param id
-     * @return
-     */
     @Override
     public boolean hasProcessor(String groupId, String id) {
         ProcessGroup group = flowController.getGroup(groupId);
@@ -87,13 +81,6 @@ public class StandardProcessorDAO extends ComponentDAO implements ProcessorDAO {
         return group.getProcessor(id) != null;
     }
 
-    /**
-     * Creates a new Processor.
-     *
-     * @param groupId The group id
-     * @param processorDTO The processor configuration
-     * @return The new Processor
-     */
     @Override
     public ProcessorNode createProcessor(String groupId, ProcessorDTO processorDTO) {
         if (processorDTO.getParentGroupId() != null && !flowController.areGroupsSame(groupId, processorDTO.getParentGroupId())) {
@@ -129,13 +116,6 @@ public class StandardProcessorDAO extends ComponentDAO implements ProcessorDAO {
         }
     }
 
-    /**
-     * Configures the specified processor with the specified configuration dto.
-     *
-     * @param processor
-     * @param processorDTO
-     * @return
-     */
     private void configureProcessor(ProcessorNode processor, ProcessorDTO processorDTO) {
         final ProcessorConfigDTO config = processorDTO.getConfig();
 
@@ -222,13 +202,6 @@ public class StandardProcessorDAO extends ComponentDAO implements ProcessorDAO {
         }
     }
 
-    /**
-     * Validates the proposed processor configuration.
-     *
-     * @param processorNode
-     * @param config
-     * @return
-     */
     private List<String> validateProposedConfiguration(ProcessorNode processorNode, ProcessorConfigDTO config) {
         List<String> validationErrors = new ArrayList<>();
 
@@ -319,22 +292,11 @@ public class StandardProcessorDAO extends ComponentDAO implements ProcessorDAO {
         return validationErrors;
     }
 
-    /**
-     * Gets the Processor transfer object for the specified id.
-     *
-     * @param id Id of the processor to return
-     * @return The Processor
-     */
     @Override
     public ProcessorNode getProcessor(String groupId, String id) {
         return locateProcessor(groupId, id);
     }
 
-    /**
-     * Gets all the Processors for this controller.
-     *
-     * @return List of all the Processors
-     */
     @Override
     public Set<ProcessorNode> getProcessors(String groupId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -417,12 +379,6 @@ public class StandardProcessorDAO extends ComponentDAO implements ProcessorDAO {
         }
     }
 
-    /**
-     * Updates the configuration for the processor using the specified
-     * processorDTO.
-     *
-     * @param processorDTO
-     */
     @Override
     public ProcessorNode updateProcessor(String groupId, ProcessorDTO processorDTO) {
         ProcessorNode processor = locateProcessor(groupId, processorDTO.getId());
@@ -482,11 +438,6 @@ public class StandardProcessorDAO extends ComponentDAO implements ProcessorDAO {
         processor.verifyCanDelete();
     }
 
-    /**
-     * Deletes the specified processor.
-     *
-     * @param processorId The processor id to delete
-     */
     @Override
     public void deleteProcessor(String groupId, String processorId) {
         // get the group and the processor

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardRemoteProcessGroupDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardRemoteProcessGroupDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardRemoteProcessGroupDAO.java
index e237b0d..2b467c0 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardRemoteProcessGroupDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardRemoteProcessGroupDAO.java
@@ -105,8 +105,8 @@ public class StandardRemoteProcessGroupDAO extends ComponentDAO implements Remot
     /**
      * Determines if the specified remote process group exists.
      *
-     * @param remoteProcessGroupId
-     * @return
+     * @param remoteProcessGroupId id
+     * @return true if exists
      */
     @Override
     public boolean hasRemoteProcessGroup(String groupId, String remoteProcessGroupId) {
@@ -139,9 +139,6 @@ public class StandardRemoteProcessGroupDAO extends ComponentDAO implements Remot
 
     /**
      * Verifies the specified remote group can be updated, if necessary.
-     *
-     * @param remoteProcessGroup
-     * @param remoteProcessGroupDto
      */
     private void verifyUpdate(RemoteProcessGroup remoteProcessGroup, RemoteProcessGroupDTO remoteProcessGroupDto) {
         // see if the remote process group can start/stop transmitting
@@ -192,9 +189,6 @@ public class StandardRemoteProcessGroupDAO extends ComponentDAO implements Remot
 
     /**
      * Verified the specified remote port can be updated, if necessary.
-     *
-     * @param port
-     * @param remoteProcessGroupPortDto
      */
     private void verifyUpdatePort(RemoteGroupPort port, RemoteProcessGroupPortDTO remoteProcessGroupPortDto) {
         // see if the remote process group can start/stop transmitting
@@ -217,10 +211,6 @@ public class StandardRemoteProcessGroupDAO extends ComponentDAO implements Remot
 
     /**
      * Validates the proposed configuration for the specified remote port.
-     *
-     * @param remoteGroupPort
-     * @param remoteProcessGroupPortDTO
-     * @return
      */
     private List<String> validateProposedRemoteProcessGroupPortConfiguration(RemoteGroupPort remoteGroupPort, RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
         final List<String> validationErrors = new ArrayList<>();
@@ -235,10 +225,6 @@ public class StandardRemoteProcessGroupDAO extends ComponentDAO implements Remot
 
     /**
      * Validates the proposed configuration for the specified remote group.
-     *
-     * @param remoteProcessGroup
-     * @param remoteProcessGroupDTO
-     * @return
      */
     private List<String> validateProposedRemoteProcessGroupConfiguration(RemoteProcessGroupDTO remoteProcessGroupDTO) {
         final List<String> validationErrors = new ArrayList<>();
@@ -327,12 +313,6 @@ public class StandardRemoteProcessGroupDAO extends ComponentDAO implements Remot
         return port;
     }
 
-    /**
-     * Updates the specified remote process group.
-     *
-     * @param remoteProcessGroupDTO
-     * @return The remote process group
-     */
     @Override
     public RemoteProcessGroup updateRemoteProcessGroup(String groupId, RemoteProcessGroupDTO remoteProcessGroupDTO) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);
@@ -383,11 +363,6 @@ public class StandardRemoteProcessGroupDAO extends ComponentDAO implements Remot
         remoteProcessGroup.verifyCanDelete();
     }
 
-    /**
-     * Deletes the specified remote process group.
-     *
-     * @param remoteProcessGroupId The remote process group id
-     */
     @Override
     public void deleteRemoteProcessGroup(String groupId, String remoteProcessGroupId) {
         ProcessGroup group = locateProcessGroup(flowController, groupId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardReportingTaskDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardReportingTaskDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardReportingTaskDAO.java
index 4c85b04..ffe606a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardReportingTaskDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardReportingTaskDAO.java
@@ -43,12 +43,6 @@ public class StandardReportingTaskDAO extends ComponentDAO implements ReportingT
 
     private ReportingTaskProvider reportingTaskProvider;
 
-    /**
-     * Locates the specified reporting task.
-     *
-     * @param reportingTaskId
-     * @return
-     */
     private ReportingTaskNode locateReportingTask(final String reportingTaskId) {
         // get the reporting task
         final ReportingTaskNode reportingTask = reportingTaskProvider.getReportingTaskNode(reportingTaskId);
@@ -61,12 +55,6 @@ public class StandardReportingTaskDAO extends ComponentDAO implements ReportingT
         return reportingTask;
     }
 
-    /**
-     * Creates a reporting task.
-     *
-     * @param reportingTaskDTO The reporting task DTO
-     * @return The reporting task
-     */
     @Override
     public ReportingTaskNode createReportingTask(final ReportingTaskDTO reportingTaskDTO) {
         // ensure the type is specified
@@ -78,7 +66,7 @@ public class StandardReportingTaskDAO extends ComponentDAO implements ReportingT
             // create the reporting task
             final ReportingTaskNode reportingTask = reportingTaskProvider.createReportingTask(reportingTaskDTO.getType(), reportingTaskDTO.getId(), true);
 
-            // ensure we can perform the update 
+            // ensure we can perform the update
             verifyUpdate(reportingTask, reportingTaskDTO);
 
             // perform the update
@@ -90,50 +78,27 @@ public class StandardReportingTaskDAO extends ComponentDAO implements ReportingT
         }
     }
 
-    /**
-     * Gets the specified reporting task.
-     *
-     * @param reportingTaskId The reporting task id
-     * @return The reporting task
-     */
     @Override
     public ReportingTaskNode getReportingTask(final String reportingTaskId) {
         return locateReportingTask(reportingTaskId);
     }
 
-    /**
-     * Determines if the specified reporting task exists.
-     *
-     * @param reportingTaskId
-     * @return
-     */
     @Override
     public boolean hasReportingTask(final String reportingTaskId) {
         return reportingTaskProvider.getReportingTaskNode(reportingTaskId) != null;
     }
 
-    /**
-     * Gets all of the reporting tasks.
-     *
-     * @return The reporting tasks
-     */
     @Override
     public Set<ReportingTaskNode> getReportingTasks() {
         return reportingTaskProvider.getAllReportingTasks();
     }
 
-    /**
-     * Updates the specified reporting task.
-     *
-     * @param reportingTaskDTO The reporting task DTO
-     * @return The reporting task
-     */
     @Override
     public ReportingTaskNode updateReportingTask(final ReportingTaskDTO reportingTaskDTO) {
         // get the reporting task
         final ReportingTaskNode reportingTask = locateReportingTask(reportingTaskDTO.getId());
 
-        // ensure we can perform the update 
+        // ensure we can perform the update
         verifyUpdate(reportingTask, reportingTaskDTO);
 
         // perform the update
@@ -181,13 +146,6 @@ public class StandardReportingTaskDAO extends ComponentDAO implements ReportingT
         return reportingTask;
     }
 
-    /**
-     * Validates the specified configuration for the specified reporting task.
-     *
-     * @param reportingTask
-     * @param reportingTaskDTO
-     * @return
-     */
     private List<String> validateProposedConfiguration(final ReportingTaskNode reportingTask, final ReportingTaskDTO reportingTaskDTO) {
         final List<String> validationErrors = new ArrayList<>();
 
@@ -240,12 +198,6 @@ public class StandardReportingTaskDAO extends ComponentDAO implements ReportingT
         verifyUpdate(reportingTask, reportingTaskDTO);
     }
 
-    /**
-     * Verifies the reporting task can be updated.
-     *
-     * @param reportingTask
-     * @param reportingTaskDTO
-     */
     private void verifyUpdate(final ReportingTaskNode reportingTask, final ReportingTaskDTO reportingTaskDTO) {
         // ensure the state, if specified, is valid
         if (isNotNull(reportingTaskDTO.getState())) {
@@ -303,12 +255,6 @@ public class StandardReportingTaskDAO extends ComponentDAO implements ReportingT
         }
     }
 
-    /**
-     * Configures the specified reporting task.
-     *
-     * @param reportingTask
-     * @param reportingTaskDTO
-     */
     private void configureReportingTask(final ReportingTaskNode reportingTask, final ReportingTaskDTO reportingTaskDTO) {
         final String name = reportingTaskDTO.getName();
         final String schedulingStrategy = reportingTaskDTO.getSchedulingStrategy();
@@ -347,11 +293,6 @@ public class StandardReportingTaskDAO extends ComponentDAO implements ReportingT
         }
     }
 
-    /**
-     * Deletes the specified reporting task.
-     *
-     * @param reportingTaskId The reporting task id
-     */
     @Override
     public void deleteReportingTask(String reportingTaskId) {
         final ReportingTaskNode reportingTask = locateReportingTask(reportingTaskId);

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardSnippetDAO.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardSnippetDAO.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardSnippetDAO.java
index 0d75c30..73a1501 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardSnippetDAO.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/dao/impl/StandardSnippetDAO.java
@@ -40,9 +40,6 @@ import org.apache.nifi.web.dao.SnippetDAO;
 import org.apache.nifi.web.util.SnippetUtils;
 import org.apache.commons.lang3.StringUtils;
 
-/**
- *
- */
 public class StandardSnippetDAO implements SnippetDAO {
 
     private FlowController flowController;
@@ -58,15 +55,6 @@ public class StandardSnippetDAO implements SnippetDAO {
         return snippet;
     }
 
-    /**
-     * Creates a new snippet based off of an existing snippet. Used for copying
-     * and pasting.
-     *
-     * @param groupId
-     * @param originX
-     * @param originY
-     * @return
-     */
     @Override
     public FlowSnippetDTO copySnippet(final String groupId, final String snippetId, final Double originX, final Double originY) {
         try {
@@ -111,13 +99,6 @@ public class StandardSnippetDAO implements SnippetDAO {
         }
     }
 
-    /**
-     * Creates a new snippet containing the specified components. Whether or not
-     * the snippet is linked will determine whether actions on this snippet
-     * actually affect the data flow.
-     *
-     * @return
-     */
     @Override
     public Snippet createSnippet(final SnippetDTO snippetDTO) {
         // create the snippet request
@@ -167,12 +148,6 @@ public class StandardSnippetDAO implements SnippetDAO {
         }
     }
 
-    /**
-     * Deletes a snippet. If the snippet is linked, also deletes the underlying
-     * components.
-     *
-     * @param snippetId
-     */
     @Override
     public void deleteSnippet(String snippetId) {
         final StandardSnippet snippet = locateSnippet(snippetId);
@@ -238,12 +213,6 @@ public class StandardSnippetDAO implements SnippetDAO {
         }
     }
 
-    /**
-     * Updates the specified snippet. If the snippet is linked, the underlying
-     * components will be moved into the specified groupId.
-     *
-     * @return
-     */
     @Override
     public Snippet updateSnippet(final SnippetDTO snippetDTO) {
         final StandardSnippet snippet = locateSnippet(snippetDTO.getId());
@@ -275,12 +244,6 @@ public class StandardSnippetDAO implements SnippetDAO {
         return snippet;
     }
 
-    /**
-     * Looks up the actual value for any sensitive properties from the specified
-     * snippet.
-     *
-     * @param snippet
-     */
     private void lookupSensitiveProperties(final FlowSnippetDTO snippet) {
         // ensure that contents have been specified
         if (snippet != null) {
@@ -302,12 +265,6 @@ public class StandardSnippetDAO implements SnippetDAO {
         }
     }
 
-    /**
-     * Looks up the actual value for any sensitive properties from the specified
-     * processors.
-     *
-     * @param snippet
-     */
     private void lookupSensitiveProcessorProperties(final Set<ProcessorDTO> processors) {
         final ProcessGroup rootGroup = flowController.getGroup(flowController.getRootGroupId());
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/filter/NodeRequestFilter.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/filter/NodeRequestFilter.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/filter/NodeRequestFilter.java
index 9079d0f..13a8bde 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/filter/NodeRequestFilter.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/filter/NodeRequestFilter.java
@@ -40,18 +40,13 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.web.context.support.WebApplicationContextUtils;
 
 /**
- * A filter that prevents direct access to nodes (i.e., flow controllers
- * connected to a cluster). Direct access to nodes by clients external to the
- * cluster is prevented because the dataflow must be identical across the
- * cluster.
+ * A filter that prevents direct access to nodes (i.e., flow controllers connected to a cluster). Direct access to nodes by clients external to the cluster is prevented because the dataflow must be
+ * identical across the cluster.
  *
- * Direct access to a node is determined by the presence of a custom request
- * header. The header key is "X-CLUSTER_MANAGER" and the value can be
- * anything/empty. The presence of this header is a simple way to flag that the
- * request was issued by the cluster manager and may proceed to the next filter.
+ * Direct access to a node is determined by the presence of a custom request header. The header key is "X-CLUSTER_MANAGER" and the value can be anything/empty. The presence of this header is a simple
+ * way to flag that the request was issued by the cluster manager and may proceed to the next filter.
  *
- * Since this header may be faked, we only make decisions about the header if
- * the application instance is a node and connected to the cluster.
+ * Since this header may be faked, we only make decisions about the header if the application instance is a node and connected to the cluster.
  *
  * @author unattributed
  */
@@ -72,8 +67,8 @@ public class NodeRequestFilter implements Filter {
         HttpServletResponse httpResp = (HttpServletResponse) resp;
 
         /*
-         * If we're the cluster manager or we're sent head requests, continue.  
-         * Head requests are included because there exists a AJAX/XHR race 
+         * If we're the cluster manager or we're sent head requests, continue.
+         * Head requests are included because there exists a AJAX/XHR race
          * condition between the following requests:
          *      HEAD /nifi-api/cluster
          *      GET  /nifi-api/controller/config
@@ -105,7 +100,7 @@ public class NodeRequestFilter implements Filter {
                     }
                 }
 
-                // if don't have a cluster context or the context indicates 
+                // if don't have a cluster context or the context indicates
                 if (clusterContext == null || !clusterContext.isRequestSentByClusterManager()) {
                     // node is connected and request is not from cluster manager, so respond with error
                     httpResp.setContentType("text/plain");
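
As a rough illustration of the header convention described in the NodeRequestFilter Javadoc above, the sketch below issues a GET request carrying the "X-CLUSTER_MANAGER" header. It is not part of the commit; the target URL and the empty header value are assumptions, and the filter only checks for the header's presence.

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ClusterManagerHeaderSketch {
        public static int get(final String nodeApiUrl) throws Exception {
            final HttpURLConnection conn = (HttpURLConnection) new URL(nodeApiUrl).openConnection();
            conn.setRequestMethod("GET");
            // NodeRequestFilter treats the presence of this header as a flag that
            // the request came from the cluster manager; the value may be empty.
            conn.setRequestProperty("X-CLUSTER_MANAGER", "");
            return conn.getResponseCode();
        }
    }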

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/Availability.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/Availability.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/Availability.java
index 7f51e0f..91f53b5 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/Availability.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/Availability.java
@@ -26,8 +26,7 @@ public enum Availability {
      */
     NCM,
     /**
-     * Service or reporting task will run only on NiFi Nodes (or standalone
-     * instance, if not clustered)
+     * Service or reporting task will run only on NiFi Nodes (or standalone instance, if not clustered)
      */
     NODE;
 }

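The Availability enum above only distinguishes components that run on the cluster manager (NCM) from those that run on nodes, or on a standalone instance when not clustered (NODE). A hypothetical sketch of a scheduler branching on those values; the schedule* helpers are placeholders for illustration, not NiFi API.

    // Illustrative only: dispatching on the Availability values shown in the diff above.
    void schedule(final Availability availability, final Runnable task) {
        switch (availability) {
            case NCM:
                // the service or reporting task runs only on the cluster manager
                scheduleOnClusterManager(task);   // hypothetical helper
                break;
            case NODE:
                // runs on nodes, or on a standalone instance when not clustered
                scheduleOnNode(task);             // hypothetical helper
                break;
        }
    }
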
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/SnippetUtils.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/SnippetUtils.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/SnippetUtils.java
index b0e7f3c..6d89cbf 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/SnippetUtils.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/util/SnippetUtils.java
@@ -70,9 +70,9 @@ public final class SnippetUtils {
     /**
      * Populates the specified snippet and returns the details.
      *
-     * @param snippet
-     * @param recurse
-     * @return
+     * @param snippet snippet
+     * @param recurse recurse
+     * @return snippet
      */
     public FlowSnippetDTO populateFlowSnippet(Snippet snippet, boolean recurse) {
         final FlowSnippetDTO snippetDto = new FlowSnippetDTO();
@@ -477,7 +477,7 @@ public final class SnippetUtils {
         // if there is any controller service that maps to another controller service, update the id's
         updateControllerServiceIdentifiers(snippetContentsCopy, serviceIdMap);
 
-        // 
+        //
         // Copy ProcessGroups
         //
         // instantiate the process groups, renaming as necessary
@@ -578,11 +578,7 @@ public final class SnippetUtils {
     }
 
     /**
-     * Generates a new id for the current id that is specified. If no seed is
-     * found, a new random id will be created.
-     *
-     * @param currentId
-     * @return
+     * Generates a new id for the current id that is specified. If no seed is found, a new random id will be created.
      */
     private String generateId(final String currentId) {
         final ClusterContext clusterContext = ClusterContextThreadLocal.getContext();

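The generateId hunk above only reflows its javadoc, but the behaviour it describes -- use a seed when one is found, otherwise fall back to a random id -- can be sketched with plain JDK calls. The seed parameter here is a placeholder for the ClusterContext lookup the real method performs, so treat this as an illustration of the documented behaviour rather than the SnippetUtils implementation.

    import java.nio.charset.StandardCharsets;
    import java.util.UUID;

    // Illustrative only -- not the NiFi implementation of SnippetUtils.generateId.
    final class IdGenerationSketch {

        static String generateId(final String currentId, final String seed) {
            if (seed == null) {
                // no seed found: create a new random id
                return UUID.randomUUID().toString();
            }
            // seed found: derive the id from (currentId, seed), so every caller
            // holding the same seed produces the same id for the same input
            final byte[] material = (currentId + seed).getBytes(StandardCharsets.UTF_8);
            return UUID.nameUUIDFromBytes(material).toString();
        }
    }
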
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/NiFiWebApiTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/NiFiWebApiTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/NiFiWebApiTest.java
index b81863b..dbe158a 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/NiFiWebApiTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/NiFiWebApiTest.java
@@ -47,12 +47,6 @@ import org.junit.Ignore;
 @Ignore
 public class NiFiWebApiTest {
 
-    /**
-     * Populates the flow.
-     *
-     * @param revision The revision to use
-     * @return The new revision
-     */
     public static void populateFlow(Client client, String baseUrl, String clientId) throws Exception {
         NiFiTestUser dfm = new NiFiTestUser(client, DfmAccessControlTest.DFM_USER_DN);
 
@@ -248,27 +242,6 @@ public class NiFiWebApiTest {
             throw new Exception("Unable to populate initial flow: " + responseEntity);
         }
 
-//        // -----------------------------------------------
-//        // Create a remote process group
-//        // -----------------------------------------------
-//        
-//        // create the remote process group
-//        RemoteProcessGroupDTO remoteProcessGroup = new RemoteProcessGroupDTO();
-//        remoteProcessGroup.setTargetUri(baseUrl);
-//        
-//        // create the remote process group entity
-//        RemoteProcessGroupEntity remoteProcessGroupEntity = new RemoteProcessGroupEntity();
-//        remoteProcessGroupEntity.setRevision(NiFiTestUser.REVISION);
-//        remoteProcessGroupEntity.setClientId(clientId);
-//        remoteProcessGroupEntity.setRemoteProcessGroup(remoteProcessGroup);
-//        
-//        // add the remote process group
-//        response = dfm.testPost(baseUrl + "/controller/process-groups/root/remote-process-groups", remoteProcessGroupEntity);
-//        
-//        // ensure a successful response
-//        if (Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
-//            throw new Exception("Unable to populate initial flow.");
-//        }
     }
 
 }

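The commented-out block removed above followed the same REST pattern populateFlow uses for every other component: build a DTO, wrap it in an entity carrying the revision and client id, POST it, and fail the test unless the response is 201 Created. The fragment below is condensed from the deleted lines; the classes and helpers are the ones that block referenced, and it is a fragment of a test method rather than standalone code.

    // Condensed from the remote process group block deleted above -- illustrative only.
    RemoteProcessGroupDTO remoteProcessGroup = new RemoteProcessGroupDTO();
    remoteProcessGroup.setTargetUri(baseUrl);

    RemoteProcessGroupEntity remoteProcessGroupEntity = new RemoteProcessGroupEntity();
    remoteProcessGroupEntity.setRevision(NiFiTestUser.REVISION);   // optimistic-locking revision the API expects
    remoteProcessGroupEntity.setClientId(clientId);
    remoteProcessGroupEntity.setRemoteProcessGroup(remoteProcessGroup);

    // add the remote process group and ensure a successful response
    response = dfm.testPost(baseUrl + "/controller/process-groups/root/remote-process-groups", remoteProcessGroupEntity);
    if (Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
        throw new Exception("Unable to create the remote process group.");
    }
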
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/AdminAccessControlTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/AdminAccessControlTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/AdminAccessControlTest.java
index ac554a7..8e0efd1 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/AdminAccessControlTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/AdminAccessControlTest.java
@@ -105,7 +105,7 @@ public class AdminAccessControlTest {
     /**
      * Ensures the admin user can get a groups content.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testGroupGet() throws Exception {
@@ -137,13 +137,12 @@ public class AdminAccessControlTest {
         Assert.assertEquals(1, processGroupContentsDTO.getInputPorts().size());
         Assert.assertEquals(1, processGroupContentsDTO.getOutputPorts().size());
         Assert.assertEquals(1, processGroupContentsDTO.getLabels().size());
-//        Assert.assertEquals(1, processGroupContentsDTO.getRemoteProcessGroups().size());
     }
 
     /**
      * Verifies the admin user cannot update a group.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testGroupPut() throws Exception {
@@ -167,7 +166,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies the admin user can retrieve the controller configuration.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testControllerConfigurationGet() throws Exception {
@@ -192,7 +191,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies the admin user cannot update the controller configuration.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testControllerConfigurationPut() throws Exception {
@@ -216,7 +215,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies the read only user cannot create a new flow archive.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testFlowConfigurationArchivePost() throws Exception {
@@ -237,7 +236,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies the admin user can retrieve his credentials.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testAuthoritiesGet() throws Exception {
@@ -260,7 +259,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies the admin user can retrieve the banners.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testBannersGet() throws Exception {
@@ -283,7 +282,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies the admin user can retrieve the processor types.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorTypesGet() throws Exception {
@@ -305,7 +304,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies the admin user can retrieve the prioritizer types.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testPrioritizerTypesGet() throws Exception {
@@ -330,7 +329,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user can get process groups.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorGroupsGet() throws Exception {
@@ -351,7 +350,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the operator user cannot create new process groups.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupPost() throws Exception {
@@ -374,10 +373,9 @@ public class AdminAccessControlTest {
     }
 
     /**
-     * Verifies that the operator user cannot update process group
-     * configuration.
+     * Verifies that the operator user cannot update process group configuration.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupPut() throws Exception {
@@ -402,7 +400,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the operator user cannot delete process groups.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupDelete() throws Exception {
@@ -421,7 +419,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user can get processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorsGet() throws Exception {
@@ -442,7 +440,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create new processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorPost() throws Exception {
@@ -467,7 +465,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create new processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorPut() throws Exception {
@@ -492,7 +490,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot delete processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorDelete() throws Exception {
@@ -511,7 +509,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user can get connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionsGet() throws Exception {
@@ -532,7 +530,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionPost() throws Exception {
@@ -557,7 +555,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionPut() throws Exception {
@@ -582,7 +580,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot delete connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionDelete() throws Exception {
@@ -601,7 +599,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user can get input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortsGet() throws Exception {
@@ -622,7 +620,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortPost() throws Exception {
@@ -647,7 +645,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortPut() throws Exception {
@@ -672,7 +670,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot delete input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortDelete() throws Exception {
@@ -691,7 +689,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user can get output ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortsGet() throws Exception {
@@ -712,7 +710,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create output ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortPost() throws Exception {
@@ -737,7 +735,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortPut() throws Exception {
@@ -762,7 +760,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot delete output ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortDelete() throws Exception {
@@ -781,7 +779,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user can get input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelsGet() throws Exception {
@@ -802,7 +800,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create labels.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelPost() throws Exception {
@@ -827,7 +825,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot create labels.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelPut() throws Exception {
@@ -852,7 +850,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies that the admin user cannot delete labels.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelDelete() throws Exception {
@@ -868,91 +866,13 @@ public class AdminAccessControlTest {
     // ----------------------------------------------
     // REMOTE PROCESS GROUP
     // ----------------------------------------------
-//    /**
-//     * Verifies that the admin user can get input ports.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupsGet() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups";
-//        
-//        // perform the request
-//        ClientResponse response = ADMIN_USER.testGet(url);
-//        
-//        // get the response
-//        RemoteProcessGroupsEntity entity = response.getEntity(RemoteProcessGroupsEntity.class);
-//        
-//        // ensure the request was successful
-//        Assert.assertEquals(200, response.getStatus());
-//        Assert.assertNotNull(entity.getRemoteProcessGroups());
-//        Assert.assertEquals(1, entity.getRemoteProcessGroups().size());
-//    }
-//    
-//    /**
-//     * Verifies that the admin user cannot create new remote process groups.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupPost() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups";
-//        
-//        // create the entity body
-//        RemoteProcessGroupEntity entity = new RemoteProcessGroupEntity();
-//        entity.setRevision(NiFiTestUser.REVISION);
-//        entity.setClientId(CLIENT_ID);
-//        
-//        // perform the request
-//        ClientResponse response = ADMIN_USER.testPost(url, entity);
-//        
-//        // ensure the request is failed with a forbidden status code
-//        Assert.assertEquals(403, response.getStatus());
-//    }
-//    
-//    /**
-//     * Verifies that the admin user update remote process groups.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupPut() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups/1";
-//        
-//        // create the entity body
-//        RemoteProcessGroupEntity entity = new RemoteProcessGroupEntity();
-//        entity.setRevision(NiFiTestUser.REVISION);
-//        entity.setClientId(CLIENT_ID);
-//        
-//        // perform the request
-//        ClientResponse response = ADMIN_USER.testPut(url, entity);
-//        
-//        // ensure the request is failed with a forbidden status code
-//        Assert.assertEquals(403, response.getStatus());
-//    }
-//    
-//    /**
-//     * Verifies that the admin user cannot delete remote process groups.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupDelete() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups/1";
-//        
-//        // perform the request
-//        ClientResponse response = ADMIN_USER.testDelete(url);
-//        
-//        // ensure the request is failed with a forbidden status code
-//        Assert.assertEquals(403, response.getStatus());
-//    }
     // ----------------------------------------------
     // HISTORY
     // ----------------------------------------------
     /**
      * Tests the ability to retrieve the NiFi history.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testHistoryGet() throws Exception {
@@ -972,7 +892,7 @@ public class AdminAccessControlTest {
     /**
      * Tests the ability to retrieve a specific action.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testActionGet() throws Exception {
@@ -989,7 +909,7 @@ public class AdminAccessControlTest {
     /**
      * Verifies the admin user can purge history.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testHistoryDelete() throws Exception {
@@ -1012,7 +932,7 @@ public class AdminAccessControlTest {
     /**
      * Tests the ability to retrieve the NiFi users.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testUsersGet() throws Exception {
@@ -1028,7 +948,7 @@ public class AdminAccessControlTest {
     /**
      * Tests the ability to retrieve a specific user.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testUserGet() throws Exception {
@@ -1045,9 +965,9 @@ public class AdminAccessControlTest {
     /**
      * Verifies the admin user can update a person.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
-//    @Test
+    //@Test
     public void testUserUpdate() throws Exception {
         String url = BASE_URL + "/controller/users";
 

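The access-control tests touched above all share one shape, also visible in the remote process group blocks that were removed: build an entity with the revision and client id, issue the request as a particular user, and assert on the HTTP status. A condensed sketch of the forbidden case, mirroring the deleted testRemoteProcessGroupPost; the surrounding test scaffolding (BASE_URL, CLIENT_ID, ADMIN_USER) is assumed, so this is not standalone code.

    // Illustrative only -- mirrors the deleted testRemoteProcessGroupPost above.
    @Test
    public void testRemoteProcessGroupPostForbidden() throws Exception {
        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups";

        // create the entity body with the revision and client id the API expects
        RemoteProcessGroupEntity entity = new RemoteProcessGroupEntity();
        entity.setRevision(NiFiTestUser.REVISION);
        entity.setClientId(CLIENT_ID);

        // perform the request as the read-only admin user
        ClientResponse response = ADMIN_USER.testPost(url, entity);

        // admin users may inspect the flow but not modify it, so expect 403 Forbidden
        Assert.assertEquals(403, response.getStatus());
    }
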
http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/e1160f59/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/DfmAccessControlTest.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/DfmAccessControlTest.java b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/DfmAccessControlTest.java
index 03b26ca..283a4a9 100644
--- a/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/DfmAccessControlTest.java
+++ b/nifi/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/test/java/org/apache/nifi/integration/accesscontrol/DfmAccessControlTest.java
@@ -137,11 +137,6 @@ public class DfmAccessControlTest {
     // ----------------------------------------------
     // PROCESS GROUPS
     // ----------------------------------------------
-    /**
-     * Ensures the dfm user can get a group contents.
-     *
-     * @throws Exception
-     */
     @Test
     public void testGroupGet() throws Exception {
         String url = BASE_URL + "/controller/process-groups/root";
@@ -179,7 +174,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the dfm user can update a group.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testGroupPut() throws Exception {
@@ -203,7 +198,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the dfm user can retrieve the controller configuration.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testControllerConfiguration() throws Exception {
@@ -242,7 +237,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the read only user cannot create a new flow archive.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testFlowConfigurationArchivePost() throws Exception {
@@ -263,7 +258,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the dfm user can retrieve his credentials.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testAuthoritiesGet() throws Exception {
@@ -286,7 +281,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the dfm user can retrieve the banners.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testBannersGet() throws Exception {
@@ -309,7 +304,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the dfm user can retrieve the processor types.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorTypesGet() throws Exception {
@@ -331,7 +326,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the dfm user can retrieve the prioritizer types.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     @Ignore
@@ -357,7 +352,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can update a process group state.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupPutState() throws Exception {
@@ -387,7 +382,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can update a process group configuration.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupPutConfiguration() throws Exception {
@@ -422,7 +417,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can update processor state.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorPutState() throws Exception {
@@ -452,7 +447,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can update processor state.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorPutConfiguration() throws Exception {
@@ -487,7 +482,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can update connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionPut() throws Exception {
@@ -517,7 +512,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can update labels.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelPut() throws Exception {
@@ -552,7 +547,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can update input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortPut() throws Exception {
@@ -587,7 +582,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can update output ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortPut() throws Exception {
@@ -619,37 +614,13 @@ public class DfmAccessControlTest {
         Assert.assertEquals("new output port name", entity.getOutputPort().getName());
     }
 
-//    /**
-//     * Verifies that the dfm user can update remote process groups.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupPut() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups";
-//        
-//        // get a random controller reference
-//        RemoteProcessGroupDTO controllerReference = getRandomRemoteProcessGroup();
-//        
-//        // create the entity body
-//        RemoteProcessGroupEntity entity = new RemoteProcessGroupEntity();
-//        entity.setRevision(NiFiTestUser.REVISION);
-//        entity.setClientId(CLIENT_ID);
-//        entity.setRemoteProcessGroup(controllerReference);
-//        
-//        // perform the request
-//        ClientResponse response = DFM_USER.testPut(url + "/" + controllerReference.getId(), entity);
-//        
-//        // ensure the request succeeded
-//        Assert.assertEquals(200, response.getStatus());
-//    }
     // ----------------------------------------------
     // POST and DELETE
     // ----------------------------------------------
     /**
      * Verifies that the dfm user can create/delete processors.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessorCreateDelete() throws Exception {
@@ -660,7 +631,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can create/delete process groups.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testProcessGroupCreateDelete() throws Exception {
@@ -671,7 +642,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can create/delete input ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testInputPortCreateDelete() throws Exception {
@@ -682,7 +653,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can create/delete output ports.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testOutputPortCreateDelete() throws Exception {
@@ -693,7 +664,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can create/delete connections.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testConnectionCreateDelete() throws Exception {
@@ -707,7 +678,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies that the dfm user can create/delete labels.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testLabelCreateDelete() throws Exception {
@@ -715,23 +686,13 @@ public class DfmAccessControlTest {
         deleteLabel(label.getId());
     }
 
-//    /**
-//     * Verifies that the dfm user can create/delete remote process groups.
-//     * 
-//     * @throws Exception 
-//     */
-//    @Test
-//    public void testRemoteProcessGroupCreateDelete() throws Exception {
-//        RemoteProcessGroupDTO remoteProcessGroup = createRemoteProcessGroup();
-//        deleteRemoteProcessGroup(remoteProcessGroup.getId());
-//    }
     // ----------------------------------------------
     // HISTORY
     // ----------------------------------------------
     /**
      * Tests the ability to retrieve the NiFi history.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testHistoryGet() throws Exception {
@@ -751,7 +712,7 @@ public class DfmAccessControlTest {
     /**
      * Tests the ability to retrieve a specific action.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testActionGet() throws Exception {
@@ -768,7 +729,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the dfm user can purge history.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testHistoryDelete() throws Exception {
@@ -920,27 +881,6 @@ public class DfmAccessControlTest {
         return inputPortsIter.next();
     }
 
-//    private RemoteProcessGroupDTO getRandomRemoteProcessGroup() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups";
-//        
-//        // get the remote process groups
-//        ClientResponse response = DFM_USER.testGet(url);
-//        
-//        // ensure the response was successful
-//        Assert.assertEquals(200, response.getStatus());
-//        
-//        // get the remote process group dtos
-//        RemoteProcessGroupsEntity remoteProcessGroupEntity = response.getEntity(RemoteProcessGroupsEntity.class);
-//        Collection<RemoteProcessGroupDTO> remoteProcessGroup = remoteProcessGroupEntity.getRemoteProcessGroups();
-//        
-//        // ensure the correct number of remote process group
-//        Assert.assertFalse(remoteProcessGroup.isEmpty());
-//        
-//        // use the first remote process group as the target
-//        Iterator<RemoteProcessGroupDTO> controllerReferenceIter = remoteProcessGroup.iterator();
-//        Assert.assertTrue(controllerReferenceIter.hasNext());
-//        return controllerReferenceIter.next();
-//    }
     // ----------------------------------------------
     // Create resource utility methods
     // ----------------------------------------------
@@ -1169,32 +1109,6 @@ public class DfmAccessControlTest {
         return label;
     }
 
-//    public RemoteProcessGroupDTO createRemoteProcessGroup() throws Exception {
-//        String url = BASE_URL + "/controller/process-groups/root/remote-process-groups";
-//        
-//        // create the controller reference
-//        RemoteProcessGroupDTO remoteProcessGroup = new RemoteProcessGroupDTO();
-//        remoteProcessGroup.setTargetUri(BASE_URL);
-//        
-//        // create the entity body
-//        RemoteProcessGroupEntity entity = new RemoteProcessGroupEntity();
-//        entity.setRevision(NiFiTestUser.REVISION);
-//        entity.setClientId(CLIENT_ID);
-//        entity.setRemoteProcessGroup(remoteProcessGroup);
-//        
-//        // perform the request
-//        ClientResponse response = DFM_USER.testPost(url, entity);
-//        
-//        // ensure the request is successful
-//        Assert.assertEquals(201, response.getStatus());
-//        
-//        // get the entity body
-//        entity = response.getEntity(RemoteProcessGroupEntity.class);
-//        
-//        // get the controller reference id
-//        Assert.assertNotNull(entity.getRemoteProcessGroup());
-//        return entity.getRemoteProcessGroup();
-//    }
     // ----------------------------------------------
     // Delete resource utility methods
     // ----------------------------------------------
@@ -1339,7 +1253,7 @@ public class DfmAccessControlTest {
     /**
      * Tests the ability to retrieve the NiFi users.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testUsersGet() throws Exception {
@@ -1355,7 +1269,7 @@ public class DfmAccessControlTest {
     /**
      * Tests the ability to retrieve a specific user.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testUserGet() throws Exception {
@@ -1372,7 +1286,7 @@ public class DfmAccessControlTest {
     /**
      * Verifies the admin user can update a person.
      *
-     * @throws Exception
+     * @throws Exception ex
      */
     @Test
     public void testUserPut() throws Exception {