Posted to commits@nifi.apache.org by ma...@apache.org on 2015/10/25 16:12:05 UTC

[01/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Repository: nifi
Updated Branches:
  refs/heads/master f2c4f2d2a -> 8a8006085
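
For context before the individual diffs: this part of the commit annotates each standard processor with the new org.apache.nifi.annotation.behavior.InputRequirement annotation so the framework can mark a processor invalid when its incoming connections do not agree with the declaration. As a minimal sketch of what such an annotation looks like, reconstructed from the imports and usages in the diffs below (the meta-annotations and comments are assumptions; only INPUT_REQUIRED is exercised in this part of the change):

    import java.lang.annotation.Documented;
    import java.lang.annotation.ElementType;
    import java.lang.annotation.Inherited;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.annotation.Target;

    // Declares a Processor's expectation about incoming Connections so that
    // the framework can validate the flow against it.
    @Documented
    @Inherited
    @Target(ElementType.TYPE)
    @Retention(RetentionPolicy.RUNTIME)
    public @interface InputRequirement {
        Requirement value();

        enum Requirement {
            // An incoming Connection is required; without one the Processor is invalid.
            INPUT_REQUIRED,
            // Incoming Connections are allowed but not required (assumed default).
            INPUT_ALLOWED,
            // Incoming Connections are not permitted; with one the Processor is invalid.
            INPUT_FORBIDDEN
        }
    }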


http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
index 68155d1..98a56bf 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
@@ -16,33 +16,7 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.ProcessorInitializationContext;
-import org.apache.nifi.processor.DataUnit;
-import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.Relationship;
 import java.io.BufferedWriter;
-
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.Validator;
-import org.apache.nifi.expression.AttributeValueDecorator;
-import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
-import org.apache.nifi.logging.ProcessorLog;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.io.OutputStreamCallback;
-import org.apache.nifi.processor.io.StreamCallback;
-import org.apache.nifi.processor.util.FlowFileFilters;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.processors.standard.util.NLKBufferedReader;
-import org.apache.nifi.util.StopWatch;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -58,9 +32,37 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.Validator;
+import org.apache.nifi.expression.AttributeValueDecorator;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.logging.ProcessorLog;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.DataUnit;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.OutputStreamCallback;
+import org.apache.nifi.processor.io.StreamCallback;
+import org.apache.nifi.processor.util.FlowFileFilters;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.standard.util.NLKBufferedReader;
+import org.apache.nifi.stream.io.StreamUtils;
+import org.apache.nifi.util.StopWatch;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Text", "Regular Expression", "Update", "Change", "Replace", "Modify", "Regex"})
 @CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of "
         + "the content that matches the Regular Expression with some alternate value.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
index 04a9c56..f68ac6c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
@@ -39,12 +39,19 @@ import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.expression.AttributeValueDecorator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
@@ -52,21 +59,16 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.StopWatch;
 
-import org.apache.commons.lang3.StringUtils;
-
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Text", "Regular Expression", "Update", "Change", "Replace", "Modify", "Regex", "Mapping"})
 @CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of the content that "
         + "matches the Regular Expression with some alternate value provided in a mapping file.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
index 7055a8a..d681793 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
@@ -29,6 +29,8 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.DynamicRelationship;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -59,6 +61,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"attributes", "routing", "Attribute Expression Language", "regexp", "regex", "Regular Expression", "Expression Language"})
 @CapabilityDescription("Routes FlowFiles based on their Attributes using the Attribute Expression Language")
 @DynamicProperty(name = "Relationship Name", value = "Attribute Expression Language", supportsExpressionLanguage = true, description = "Routes FlowFiles whose "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
index 937bc69..c63839c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
@@ -29,10 +29,18 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.DynamicRelationship;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.expression.AttributeValueDecorator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
@@ -40,20 +48,15 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.DynamicRelationship;
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.IntegerHolder;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"route", "content", "regex", "regular expression", "regexp"})
 @CapabilityDescription("Applies Regular Expressions to the content of a FlowFile and routes a copy of the FlowFile to each "
         + "destination whose Regular Expression matches. Regular Expressions are added as User-Defined Properties where the name "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
index 1f0fc7b..aa88827 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
@@ -32,28 +32,31 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.util.file.monitor.LastModifiedMonitor;
-import org.apache.nifi.util.file.monitor.SynchronousFileWatcher;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.util.file.monitor.LastModifiedMonitor;
+import org.apache.nifi.util.file.monitor.SynchronousFileWatcher;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"scan", "attributes", "search", "lookup"})
 @CapabilityDescription("Scans the specified attributes of FlowFiles, checking to see if any of their values are "
         + "present within the specified dictionary of terms")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
index 445249b..6fe8446 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
@@ -35,11 +35,13 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -63,6 +65,7 @@ import org.apache.nifi.util.search.ahocorasick.SearchState;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"aho-corasick", "scan", "content", "byte sequence", "search", "find", "dictionary"})
 @CapabilityDescription("Scans the content of FlowFiles for terms that are found in a user-supplied dictionary. If a term is matched, the UTF-8 "
         + "encoded version of the term will be added to the FlowFile using the 'matching.term' attribute")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
index e5e90ea..7b1103f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
@@ -26,13 +26,15 @@ import java.util.Set;
 import java.util.UUID;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -48,6 +50,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @SideEffectFree
 @SupportsBatching
 @Tags({"segment", "split"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Segments a FlowFile into multiple smaller segments on byte boundaries. Each segment is given the following attributes: "
         + "fragment.identifier, fragment.index, fragment.count, segment.original.filename; these attributes can then be used by the "
         + "MergeContent processor in order to reconstitute the original FlowFile")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
index 3da1bd5..3cdf787 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
@@ -33,14 +33,16 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
@@ -64,6 +66,7 @@ import org.apache.nifi.util.Tuple;
 @SideEffectFree
 @SupportsBatching
 @Tags({"content", "split", "binary"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits incoming FlowFiles by a specified byte sequence")
 @WritesAttributes({
     @WritesAttribute(attribute = "fragment.identifier", description = "All split FlowFiles produced from the same parent FlowFile will have the same randomly generated UUID added for this attribute"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
index a3a4ed8..dfd09a2 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
@@ -16,12 +16,21 @@
  */
 package org.apache.nifi.processors.standard;
 
-import com.jayway.jsonpath.DocumentContext;
-import com.jayway.jsonpath.InvalidJsonException;
-import com.jayway.jsonpath.JsonPath;
-import com.jayway.jsonpath.PathNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -38,21 +47,16 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
+import com.jayway.jsonpath.DocumentContext;
+import com.jayway.jsonpath.InvalidJsonException;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.PathNotFoundException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"json", "split", "jsonpath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a JSON File into multiple, separate FlowFiles for an array element specified by a JsonPath expression. "
         + "Each generated FlowFile is comprised of an element of the specified array and transferred to relationship 'split,' "
         + "with the original file transferred to the 'original' relationship. If the specified JsonPath is not found or "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
index 56bd729..e966880 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
@@ -16,50 +16,53 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
-import org.apache.nifi.stream.io.BufferedInputStream;
-import org.apache.nifi.stream.io.BufferedOutputStream;
-import org.apache.nifi.stream.io.ByteArrayOutputStream;
-import org.apache.nifi.stream.io.ByteCountingInputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.BufferedInputStream;
+import org.apache.nifi.stream.io.BufferedOutputStream;
+import org.apache.nifi.stream.io.ByteArrayOutputStream;
+import org.apache.nifi.stream.io.ByteCountingInputStream;
 import org.apache.nifi.util.IntegerHolder;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.UUID;
-
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"split", "text"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a text file into multiple smaller text files on line boundaries, each having up to a configured number of lines")
 @WritesAttributes({
     @WritesAttribute(attribute = "text.line.count", description = "The number of lines of text from the original FlowFile that were copied to this FlowFile"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
index 617fcbe..a8453bb 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
@@ -29,27 +29,28 @@ import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
 
+import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.BufferedInputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.XmlElementNotifier;
+import org.apache.nifi.stream.io.BufferedInputStream;
 import org.apache.nifi.util.BooleanHolder;
-
-import org.apache.commons.lang3.StringEscapeUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.Attributes;
@@ -63,6 +64,7 @@ import org.xml.sax.XMLReader;
 @SideEffectFree
 @SupportsBatching
 @Tags({"xml", "split"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits an XML File into multiple separate FlowFiles, each comprising a child or descendant of the original root element")
 public class SplitXml extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
index fc4730c..e77dfc6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
@@ -35,6 +35,8 @@ import javax.xml.transform.stream.StreamSource;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -62,6 +64,7 @@ import org.apache.nifi.util.Tuple;
 @SideEffectFree
 @SupportsBatching
 @Tags({"xml", "xslt", "transform"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Applies the provided XSLT file to the flowfile XML payload. A new FlowFile is created "
         + "with transformed content and is routed to the 'success' relationship. If the XSL transform "
         + "fails, the original FlowFile is routed to the 'failure' relationship")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
index ff4d936..e94853b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
@@ -35,14 +35,16 @@ import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -67,6 +69,7 @@ import org.apache.nifi.util.ObjectHolder;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Unpack", "un-merge", "tar", "zip", "archive", "flowfile-stream", "flowfile-stream-v3"})
 @CapabilityDescription("Unpacks the content of FlowFiles that have been packaged with one of several different Packaging Formats, emitting one to many "
         + "FlowFiles for each input FlowFile")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
index d505898..3693590 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
@@ -31,6 +31,14 @@ import javax.xml.validation.Schema;
 import javax.xml.validation.SchemaFactory;
 import javax.xml.validation.Validator;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -39,21 +47,15 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.BooleanHolder;
-
 import org.xml.sax.SAXException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"xml", "schema", "validation", "xsd"})
 @CapabilityDescription("Validates the contents of FlowFiles against a user-specified XML Schema file")
 public class ValidateXml extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
index dd81289..8cf5726 100644
--- a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
+++ b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
@@ -31,9 +31,13 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -57,11 +61,9 @@ import org.apache.nifi.search.Searchable;
 import org.apache.nifi.update.attributes.Action;
 import org.apache.nifi.update.attributes.Condition;
 import org.apache.nifi.update.attributes.Criteria;
-import org.apache.nifi.update.attributes.Rule;
 import org.apache.nifi.update.attributes.FlowFilePolicy;
+import org.apache.nifi.update.attributes.Rule;
 import org.apache.nifi.update.attributes.serde.CriteriaSerDe;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 
 /**
  * This processor supports updating flowfile attributes and can do so
@@ -116,6 +118,7 @@ import org.apache.nifi.annotation.behavior.WritesAttribute;
  */
 @EventDriven
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"attributes", "modification", "update", "delete", "Attribute Expression Language"})
 @CapabilityDescription("Updates the Attributes for a FlowFile by using the Attribute Expression Language and/or deletes the attributes based on a regular expression")
 @DynamicProperty(name = "A FlowFile attribute to update", value = "The value to set it to", supportsExpressionLanguage = true,


[12/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
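
This part moves to the framework side. The StandardProcessorNode diff below is largely an import and member re-ordering and is truncated, so as a hedged sketch of how a node could derive the requirement from the annotation and flag disagreeing connections during validation (the surrounding method is an assumption, not the verbatim framework code; incomingConnectionsRef is the field visible in the removed block below):

    // Resolve the declared requirement; treat an unannotated Processor as
    // INPUT_ALLOWED (assumed default).
    final InputRequirement annotation = processor.getClass().getAnnotation(InputRequirement.class);
    final Requirement requirement = annotation == null ? Requirement.INPUT_ALLOWED : annotation.value();

    // During validation, compare the declaration against the actual incoming Connections.
    final boolean hasIncoming = !incomingConnectionsRef.get().isEmpty();
    if (requirement == Requirement.INPUT_REQUIRED && !hasIncoming) {
        // invalid: an upstream Connection is required but none exists
    } else if (requirement == Requirement.INPUT_FORBIDDEN && hasIncoming) {
        // invalid: this Processor does not permit incoming Connections
    }
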
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index cbd0f88..0c39eda 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -16,14 +16,6 @@
  */
 package org.apache.nifi.controller;
 
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.behavior.TriggerSerially;
-import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
-import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-
 import static java.util.Objects.requireNonNull;
 
 import java.util.ArrayList;
@@ -43,6 +35,17 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.TriggerSerially;
+import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
+import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.connectable.Connectable;
@@ -61,8 +64,6 @@ import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.scheduling.SchedulingStrategy;
 import org.apache.nifi.util.FormatUtils;
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.quartz.CronExpression;
 import org.slf4j.LoggerFactory;
 
@@ -73,1185 +74,1242 @@ import org.slf4j.LoggerFactory;
  */
 public class StandardProcessorNode extends ProcessorNode implements Connectable {
 
-    public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
-
-    public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
-    public static final String DEFAULT_YIELD_PERIOD = "1 sec";
-    public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
-    private final AtomicReference<ProcessGroup> processGroup;
-    private final Processor processor;
-    private final AtomicReference<String> identifier;
-    private final Map<Connection, Connectable> destinations;
-    private final Map<Relationship, Set<Connection>> connections;
-    private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
-    private final AtomicReference<List<Connection>> incomingConnectionsRef;
-    private final ReentrantReadWriteLock rwLock;
-    private final Lock readLock;
-    private final Lock writeLock;
-    private final AtomicBoolean isolated;
-    private final AtomicBoolean lossTolerant;
-    private final AtomicReference<ScheduledState> scheduledState;
-    private final AtomicReference<String> comments;
-    private final AtomicReference<String> name;
-    private final AtomicReference<Position> position;
-    private final AtomicReference<String> annotationData;
-    private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
-    private final AtomicReference<String> yieldPeriod;
-    private final AtomicReference<String> penalizationPeriod;
-    private final AtomicReference<Map<String, String>> style;
-    private final AtomicInteger concurrentTaskCount;
-    private final AtomicLong yieldExpiration;
-    private final AtomicLong schedulingNanos;
-    private final boolean triggerWhenEmpty;
-    private final boolean sideEffectFree;
-    private final boolean triggeredSerially;
-    private final boolean triggerWhenAnyDestinationAvailable;
-    private final boolean eventDrivenSupported;
-    private final boolean batchSupported;
-    private final ValidationContextFactory validationContextFactory;
-    private final ProcessScheduler processScheduler;
-    private long runNanos = 0L;
-
-    private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
-
-    @SuppressWarnings("deprecation")
-    public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
-            final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
-        super(processor, uuid, validationContextFactory, controllerServiceProvider);
-
-        this.processor = processor;
-        identifier = new AtomicReference<>(uuid);
-        destinations = new HashMap<>();
-        connections = new HashMap<>();
-        incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
-        scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
-        rwLock = new ReentrantReadWriteLock(false);
-        readLock = rwLock.readLock();
-        writeLock = rwLock.writeLock();
-        lossTolerant = new AtomicBoolean(false);
-        final Set<Relationship> emptySetOfRelationships = new HashSet<>();
-        undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
-        comments = new AtomicReference<>("");
-        name = new AtomicReference<>(processor.getClass().getSimpleName());
-        schedulingPeriod = new AtomicReference<>("0 sec");
-        schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
-        yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
-        yieldExpiration = new AtomicLong(0L);
-        concurrentTaskCount = new AtomicInteger(1);
-        position = new AtomicReference<>(new Position(0D, 0D));
-        style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
-        this.processGroup = new AtomicReference<>();
-        processScheduler = scheduler;
-        annotationData = new AtomicReference<>();
-        isolated = new AtomicBoolean(false);
-        penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
-
-        final Class<?> procClass = processor.getClass();
-        triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
-        sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
-        batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
-        triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
-        triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
-                || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
-        this.validationContextFactory = validationContextFactory;
-        eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
-                || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
-        schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
-    }
-
-    /**
-     * @return comments about this specific processor instance
-     */
-    @Override
-    public String getComments() {
-        return comments.get();
-    }
-
-    /**
-     * Provides an opportunity to retain information about this particular processor instance
-     *
-     * @param comments new comments
-     */
-    @Override
-    public void setComments(final String comments) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.comments.set(comments);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public ScheduledState getScheduledState() {
-        return scheduledState.get();
-    }
-
-    @Override
-    public Position getPosition() {
-        return position.get();
-    }
-
-    @Override
-    public void setPosition(Position position) {
-        this.position.set(position);
-    }
-
-    @Override
-    public Map<String, String> getStyle() {
-        return style.get();
-    }
-
-    @Override
-    public void setStyle(final Map<String, String> style) {
-        if (style != null) {
-            this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
-        }
-    }
-
-    @Override
-    public String getIdentifier() {
-        return identifier.get();
-    }
-
-    /**
-     * @return if true flow file content generated by this processor is considered loss tolerant
-     */
-    @Override
-    public boolean isLossTolerant() {
-        return lossTolerant.get();
-    }
-
-    @Override
-    public boolean isIsolated() {
-        return isolated.get();
-    }
-
-    /**
-     * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
-     */
-    @Override
-    public boolean isTriggerWhenEmpty() {
-        return triggerWhenEmpty;
-    }
-
-    /**
-     * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
-     */
-    @Override
-    public boolean isSideEffectFree() {
-        return sideEffectFree;
-    }
-
-    @Override
-    public boolean isHighThroughputSupported() {
-        return batchSupported;
-    }
-
-    /**
-     * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
-     */
-    @Override
-    public boolean isTriggerWhenAnyDestinationAvailable() {
-        return triggerWhenAnyDestinationAvailable;
-    }
-
-    /**
-     * Indicates whether flow file content made by this processor must be persisted
-     *
-     * @param lossTolerant tolerant
-     */
-    @Override
-    public void setLossTolerant(final boolean lossTolerant) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.lossTolerant.set(lossTolerant);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Indicates whether the processor runs on only the primary node.
-     *
-     * @param isolated isolated
-     */
-    public void setIsolated(final boolean isolated) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.isolated.set(isolated);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isAutoTerminated(final Relationship relationship) {
-        final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
-        if (terminatable == null) {
-            return false;
-        }
-        return terminatable.contains(relationship);
-    }
-
-    @Override
-    public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-
-            for (final Relationship rel : terminate) {
-                if (!getConnections(rel).isEmpty()) {
-                    throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
-                }
-            }
-            undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @return an unmodifiable Set that contains all of the Relationship objects that are configured to be auto-terminated
-     */
-    @Override
-    public Set<Relationship> getAutoTerminatedRelationships() {
-        Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
-        if (relationships == null) {
-            relationships = new HashSet<>();
-        }
-        return Collections.unmodifiableSet(relationships);
-    }
-
-    @Override
-    public String getName() {
-        return name.get();
-    }
-
-    /**
-     * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
-     */
-    @SuppressWarnings("deprecation")
-    public String getProcessorDescription() {
-        CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
-        String description = null;
-        if (capDesc != null) {
-            description = capDesc.value();
-        } else {
-            final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
-                    = processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
-            if (deprecatedCapDesc != null) {
-                description = deprecatedCapDesc.value();
-            }
-        }
-
-        return description;
-    }
-
-    @Override
-    public void setName(final String name) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.name.set(name);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @param timeUnit determines the unit of time to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
-     * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
-     */
-    @Override
-    public long getSchedulingPeriod(final TimeUnit timeUnit) {
-        return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
-    }
-
-    @Override
-    public boolean isEventDrivenSupported() {
-        readLock.lock();
-        try {
-            return this.eventDrivenSupported;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    /**
-     * Updates the Scheduling Strategy used for this Processor
-     *
-     * @param schedulingStrategy strategy
-     *
-     * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
-     */
-    @Override
-    public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
-        writeLock.lock();
-        try {
-            if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
-                // not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
-                // it no longer supports EventDriven mode, we don't want the app to fail to start up if it was already in Event-Driven
-                // Mode. Instead, we will simply leave it in Timer-Driven mode
-                return;
-            }
-
-            this.schedulingStrategy = schedulingStrategy;
-            setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @return the currently configured scheduling strategy
-     */
-    @Override
-    public SchedulingStrategy getSchedulingStrategy() {
-        readLock.lock();
-        try {
-            return this.schedulingStrategy;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public String getSchedulingPeriod() {
-        return schedulingPeriod.get();
-    }
-
-    @Override
-    public void setScheduldingPeriod(final String schedulingPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-
-            switch (schedulingStrategy) {
-                case CRON_DRIVEN: {
-                    try {
-                        new CronExpression(schedulingPeriod);
-                    } catch (final Exception e) {
-                        throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
-                    }
-                }
-                break;
-                case PRIMARY_NODE_ONLY:
-                case TIMER_DRIVEN: {
-                    final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
-                    if (schedulingNanos < 0) {
-                        throw new IllegalArgumentException("Scheduling Period must be positive");
-                    }
-                    this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
-                }
-                break;
-                case EVENT_DRIVEN:
-                default:
-                    return;
-            }
-
-            this.schedulingPeriod.set(schedulingPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public long getRunDuration(final TimeUnit timeUnit) {
-        readLock.lock();
-        try {
-            return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void setRunDuration(final long duration, final TimeUnit timeUnit) {
-        writeLock.lock();
-        try {
-            if (duration < 0) {
-                throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
-            }
-
-            this.runNanos = timeUnit.toNanos(duration);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public long getYieldPeriod(final TimeUnit timeUnit) {
-        return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-    }
-
-    @Override
-    public String getYieldPeriod() {
-        return yieldPeriod.get();
-    }
-
-    @Override
-    public void setYieldPeriod(final String yieldPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
-            if (yieldMillis < 0) {
-                throw new IllegalArgumentException("Yield duration must be positive");
-            }
-            this.yieldPeriod.set(yieldPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and
-     * {@link #setYieldPeriod(String)} methods.
-     */
-    @Override
-    public void yield() {
-        final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
-        yield(yieldMillis, TimeUnit.MILLISECONDS);
-
-        final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
-        LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
-    }
-
-    @Override
-    public void yield(final long period, final TimeUnit timeUnit) {
-        final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
-        yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
-
-        processScheduler.yield(this);
-    }
-
-    /**
-     * @return the time, in milliseconds since the Epoch, at which this processor is to once again be scheduled.
-     */
-    @Override
-    public long getYieldExpiration() {
-        return yieldExpiration.get();
-    }
-
-    @Override
-    public long getPenalizationPeriod(final TimeUnit timeUnit) {
-        return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-    }
-
-    @Override
-    public String getPenalizationPeriod() {
-        return penalizationPeriod.get();
-    }
-
-    @Override
-    public void setPenalizationPeriod(final String penalizationPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
-            if (penalizationMillis < 0) {
-                throw new IllegalArgumentException("Penalization duration must be positive");
-            }
-            this.penalizationPeriod.set(penalizationPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Sets the number of concurrent tasks that may be running for this processor.
-     *
-     * @param taskCount the maximum number of concurrent tasks this processor may have running
-     * @throws IllegalArgumentException if the given value is less than 1
-     */
-    @Override
-    public void setMaxConcurrentTasks(final int taskCount) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
-                throw new IllegalArgumentException();
-            }
-            if (!triggeredSerially) {
-                concurrentTaskCount.set(taskCount);
-            }
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isTriggeredSerially() {
-        return triggeredSerially;
-    }
-
-    /**
-     * @return the number of tasks that may execute concurrently for this processor
-     */
-    @Override
-    public int getMaxConcurrentTasks() {
-        return concurrentTaskCount.get();
-    }
-
-    @Override
-    public LogLevel getBulletinLevel() {
-        return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
-    }
-
-    @Override
-    public void setBulletinLevel(final LogLevel level) {
-        LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
-    }
-
-    @Override
-    public Set<Connection> getConnections() {
-        final Set<Connection> allConnections = new HashSet<>();
-        readLock.lock();
-        try {
-            for (final Set<Connection> connectionSet : connections.values()) {
-                allConnections.addAll(connectionSet);
-            }
-        } finally {
-            readLock.unlock();
-        }
-
-        return allConnections;
-    }
-
-    @Override
-    public List<Connection> getIncomingConnections() {
-        return incomingConnectionsRef.get();
-    }
-
-    @Override
-    public Set<Connection> getConnections(final Relationship relationship) {
-        final Set<Connection> applicableConnections;
-        readLock.lock();
-        try {
-            applicableConnections = connections.get(relationship);
-        } finally {
-            readLock.unlock();
-        }
-        return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
-    }
-
-    @Override
-    public void addConnection(final Connection connection) {
-        Objects.requireNonNull(connection, "connection cannot be null");
-
-        if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
-            throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
-        }
-
-        writeLock.lock();
-        try {
-            List<Connection> updatedIncoming = null;
-            if (connection.getDestination().equals(this)) {
-                // don't add the connection twice. This may occur if we have a self-loop because we will be told
-                // to add the connection once because we are the source and again because we are the destination.
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                updatedIncoming = new ArrayList<>(incomingConnections);
-                if (!updatedIncoming.contains(connection)) {
-                    updatedIncoming.add(connection);
-                }
-            }
-
-            if (connection.getSource().equals(this)) {
-                // don't add the connection twice. This may occur if we have a self-loop because we will be told
-                // to add the connection once because we are the source and again because we are the destination.
-                if (!destinations.containsKey(connection)) {
-                    for (final Relationship relationship : connection.getRelationships()) {
-                        final Relationship rel = getRelationship(relationship.getName());
-                        Set<Connection> set = connections.get(rel);
-                        if (set == null) {
-                            set = new HashSet<>();
-                            connections.put(rel, set);
-                        }
-
-                        set.add(connection);
-
-                        destinations.put(connection, connection.getDestination());
-                    }
-
-                    final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-                    if (autoTerminated != null) {
-                        autoTerminated.removeAll(connection.getRelationships());
-                        this.undefinedRelationshipsToTerminate.set(autoTerminated);
-                    }
-                }
-            }
-
-            if (updatedIncoming != null) {
-                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-            }
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean hasIncomingConnection() {
-        return !incomingConnectionsRef.get().isEmpty();
-    }
-
-    @Override
-    public void updateConnection(final Connection connection) throws IllegalStateException {
-        if (requireNonNull(connection).getSource().equals(this)) {
-            writeLock.lock();
-            try {
-                //
-                // update any relationships
-                //
-                // first check if any relations were removed.
-                final List<Relationship> existingRelationships = new ArrayList<>();
-                for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
-                    if (entry.getValue().contains(connection)) {
-                        existingRelationships.add(entry.getKey());
-                    }
-                }
-
-                for (final Relationship rel : connection.getRelationships()) {
-                    if (!existingRelationships.contains(rel)) {
-                        // relationship was removed. Check if this is legal.
-                        final Set<Connection> connectionsForRelationship = getConnections(rel);
-                        if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
-                            // if we are running and we do not terminate undefined relationships and this is the only
-                            // connection that defines the given relationship, and that relationship is required,
-                            // then it is not legal to remove this relationship from this connection.
-                            throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
-                                    + this + ", which is currently running");
-                        }
-                    }
-                }
-
-                // remove the connection from any list that currently contains it
-                for (final Set<Connection> list : connections.values()) {
-                    list.remove(connection);
-                }
-
-                // add the connection in for all relationships listed.
-                for (final Relationship rel : connection.getRelationships()) {
-                    Set<Connection> set = connections.get(rel);
-                    if (set == null) {
-                        set = new HashSet<>();
-                        connections.put(rel, set);
-                    }
-                    set.add(connection);
-                }
-
-                // update to the new destination
-                destinations.put(connection, connection.getDestination());
-
-                final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-                if (autoTerminated != null) {
-                    autoTerminated.removeAll(connection.getRelationships());
-                    this.undefinedRelationshipsToTerminate.set(autoTerminated);
-                }
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (connection.getDestination().equals(this)) {
-            writeLock.lock();
-            try {
-                // update our incoming connections -- we can just remove & re-add the connection to
-                // update the list.
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-                updatedIncoming.remove(connection);
-                updatedIncoming.add(connection);
-                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-            } finally {
-                writeLock.unlock();
-            }
-        }
-    }
-
-    @Override
-    public void removeConnection(final Connection connection) {
-        boolean connectionRemoved = false;
-
-        if (requireNonNull(connection).getSource().equals(this)) {
-            for (final Relationship relationship : connection.getRelationships()) {
-                final Set<Connection> connectionsForRelationship = getConnections(relationship);
-                if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
-                    throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
-                }
-            }
-
-            writeLock.lock();
-            try {
-                for (final Set<Connection> connectionList : this.connections.values()) {
-                    connectionList.remove(connection);
-                }
-
-                connectionRemoved = (destinations.remove(connection) != null);
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (connection.getDestination().equals(this)) {
-            writeLock.lock();
-            try {
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                if (incomingConnections.contains(connection)) {
-                    final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-                    updatedIncoming.remove(connection);
-                    incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-                    return;
-                }
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (!connectionRemoved) {
-            throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
-        }
-    }
-
-    /**
-     * @param relationshipName the name of the relationship
-     * @return the relationship with the given name defined by this node's processor, or a new relationship with that name if the processor does not define one
-     */
-    @Override
-    public Relationship getRelationship(final String relationshipName) {
-        final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
-        Relationship returnRel = specRel;
-
-        final Set<Relationship> relationships;
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            relationships = processor.getRelationships();
-        }
-
-        for (final Relationship rel : relationships) {
-            if (rel.equals(specRel)) {
-                returnRel = rel;
-                break;
-            }
-        }
-        return returnRel;
-    }
-
-    @Override
-    public Processor getProcessor() {
-        return this.processor;
-    }
-
-    /**
-     * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
-     */
-    public Set<Connectable> getDestinations() {
-        final Set<Connectable> nonSelfDestinations = new HashSet<>();
-        readLock.lock();
-        try {
-            for (final Connectable connectable : destinations.values()) {
-                if (connectable != this) {
-                    nonSelfDestinations.add(connectable);
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-        return nonSelfDestinations;
-    }
-
-    public Set<Connectable> getDestinations(final Relationship relationship) {
-        readLock.lock();
-        try {
-            final Set<Connectable> destinationSet = new HashSet<>();
-            final Set<Connection> relationshipConnections = connections.get(relationship);
-            if (relationshipConnections != null) {
-                for (final Connection connection : relationshipConnections) {
-                    destinationSet.add(destinations.get(connection));
-                }
-            }
-            return destinationSet;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    public Set<Relationship> getUndefinedRelationships() {
-        final Set<Relationship> undefined = new HashSet<>();
-        readLock.lock();
-        try {
-            final Set<Relationship> relationships;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                relationships = processor.getRelationships();
-            }
-
-            if (relationships == null) {
-                return undefined;
-            }
-            for (final Relationship relation : relationships) {
-                final Set<Connection> connectionSet = this.connections.get(relation);
-                if (connectionSet == null || connectionSet.isEmpty()) {
-                    undefined.add(relation);
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-        return undefined;
-    }
-
-    /**
-     * Determines if the given node is a destination for this node
-     *
-     * @param node node
-     * @return true if is a direct destination node; false otherwise
-     */
-    boolean isRelated(final ProcessorNode node) {
-        readLock.lock();
-        try {
-            return this.destinations.containsValue(node);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isRunning() {
-        readLock.lock();
-        try {
-            return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public int getActiveThreadCount() {
-        readLock.lock();
-        try {
-            return processScheduler.getActiveThreadCount(this);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isValid() {
-        readLock.lock();
-        try {
-            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-            final Collection<ValidationResult> validationResults;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                validationResults = getProcessor().validate(validationContext);
-            }
-
-            for (final ValidationResult result : validationResults) {
-                if (!result.isValid()) {
-                    return false;
-                }
-            }
-
-            for (final Relationship undef : getUndefinedRelationships()) {
-                if (!isAutoTerminated(undef)) {
-                    return false;
-                }
-            }
-        } catch (final Throwable t) {
-            return false;
-        } finally {
-            readLock.unlock();
-        }
-
-        return true;
-    }
-
-    @Override
-    public Collection<ValidationResult> getValidationErrors() {
-        final List<ValidationResult> results = new ArrayList<>();
-        readLock.lock();
-        try {
-            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-            final Collection<ValidationResult> validationResults;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                validationResults = getProcessor().validate(validationContext);
-            }
-
-            for (final ValidationResult result : validationResults) {
-                if (!result.isValid()) {
-                    results.add(result);
-                }
-            }
-
-            for (final Relationship relationship : getUndefinedRelationships()) {
-                if (!isAutoTerminated(relationship)) {
-                    final ValidationResult error = new ValidationResult.Builder()
-                            .explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
-                            .subject("Relationship " + relationship.getName())
-                            .valid(false)
-                            .build();
-                    results.add(error);
-                }
-            }
-        } catch (final Throwable t) {
-            results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
-        } finally {
-            readLock.unlock();
-        }
-        return results;
-    }
-
-    /**
-     * Establishes node equality (based on the processor's identifier)
-     *
-     * @param other node
-     * @return true if equal
-     */
-    @Override
-    public boolean equals(final Object other) {
-        if (!(other instanceof ProcessorNode)) {
-            return false;
-        }
-        final ProcessorNode on = (ProcessorNode) other;
-        return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
-    }
-
-    @Override
-    public int hashCode() {
-        return new HashCodeBuilder(7, 67).append(identifier).toHashCode();
-    }
-
-    @Override
-    public Collection<Relationship> getRelationships() {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            return getProcessor().getRelationships();
-        }
-    }
-
-    @Override
-    public String toString() {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            return getProcessor().toString();
-        }
-    }
-
-    @Override
-    public ProcessGroup getProcessGroup() {
-        return processGroup.get();
-    }
-
-    @Override
-    public void setProcessGroup(final ProcessGroup group) {
-        writeLock.lock();
-        try {
-            this.processGroup.set(group);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            processor.onTrigger(context, sessionFactory);
-        }
-    }
-
-    @Override
-    public ConnectableType getConnectableType() {
-        return ConnectableType.PROCESSOR;
-    }
-
-    @Override
-    public void setScheduledState(final ScheduledState scheduledState) {
-        this.scheduledState.set(scheduledState);
-        if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
-            yieldExpiration.set(0L);
-        }
-    }
-
-    @Override
-    public void setAnnotationData(final String data) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot set AnnotationData while processor is running");
-            }
-
-            this.annotationData.set(data);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public String getAnnotationData() {
-        return annotationData.get();
-    }
-
-    @Override
-    public Collection<ValidationResult> validate(final ValidationContext validationContext) {
-        return processor.validate(validationContext);
-    }
-
-    @Override
-    public void verifyCanDelete() throws IllegalStateException {
-        verifyCanDelete(false);
-    }
-
-    @Override
-    public void verifyCanDelete(final boolean ignoreConnections) {
-        readLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException(this + " is running");
-            }
-
-            if (!ignoreConnections) {
-                for (final Set<Connection> connectionSet : connections.values()) {
-                    for (final Connection connection : connectionSet) {
-                        connection.verifyCanDelete();
-                    }
-                }
-
-                for (final Connection connection : incomingConnectionsRef.get()) {
-                    if (connection.getSource().equals(this)) {
-                        connection.verifyCanDelete();
-                    } else {
-                        throw new IllegalStateException(this + " is the destination of another component");
-                    }
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanStart() {
-        readLock.lock();
-        try {
-            switch (getScheduledState()) {
-                case DISABLED:
-                    throw new IllegalStateException(this + " cannot be started because it is disabled");
-                case RUNNING:
-                    throw new IllegalStateException(this + " cannot be started because it is already running");
-                case STOPPED:
-                    break;
-            }
-            verifyNoActiveThreads();
-
-            if (!isValid()) {
-                throw new IllegalStateException(this + " is not in a valid state");
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
-        switch (getScheduledState()) {
-            case DISABLED:
-                throw new IllegalStateException(this + " cannot be started because it is disabled");
-            case RUNNING:
-                throw new IllegalStateException(this + " cannot be started because it is already running");
-            case STOPPED:
-                break;
-        }
-        verifyNoActiveThreads();
-
-        final Set<String> ids = new HashSet<>();
-        for (final ControllerServiceNode node : ignoredReferences) {
-            ids.add(node.getIdentifier());
-        }
-
-        final Collection<ValidationResult> validationResults = getValidationErrors(ids);
-        for (final ValidationResult result : validationResults) {
-            if (!result.isValid()) {
-                throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
-            }
-        }
-    }
-
-    @Override
-    public void verifyCanStop() {
-        if (getScheduledState() != ScheduledState.RUNNING) {
-            throw new IllegalStateException(this + " is not scheduled to run");
-        }
-    }
-
-    @Override
-    public void verifyCanUpdate() {
-        readLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException(this + " is not stopped");
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanEnable() {
-        readLock.lock();
-        try {
-            if (getScheduledState() != ScheduledState.DISABLED) {
-                throw new IllegalStateException(this + " is not disabled");
-            }
-
-            verifyNoActiveThreads();
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanDisable() {
-        readLock.lock();
-        try {
-            if (getScheduledState() != ScheduledState.STOPPED) {
-                throw new IllegalStateException(this + " is not stopped");
-            }
-            verifyNoActiveThreads();
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    private void verifyNoActiveThreads() throws IllegalStateException {
-        final int threadCount = processScheduler.getActiveThreadCount(this);
-        if (threadCount > 0) {
-            throw new IllegalStateException(this + " has " + threadCount + " threads still active");
-        }
-    }
-
-    @Override
-    public void verifyModifiable() throws IllegalStateException {
-        if (isRunning()) {
-            throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-        }
-    }
+	public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
+
+	public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
+	public static final String DEFAULT_YIELD_PERIOD = "1 sec";
+	public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
+	private final AtomicReference<ProcessGroup> processGroup;
+	private final Processor processor;
+	private final AtomicReference<String> identifier;
+	private final Map<Connection, Connectable> destinations;
+	private final Map<Relationship, Set<Connection>> connections;
+	private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
+	private final AtomicReference<List<Connection>> incomingConnectionsRef;
+	private final ReentrantReadWriteLock rwLock;
+	private final Lock readLock;
+	private final Lock writeLock;
+	private final AtomicBoolean isolated;
+	private final AtomicBoolean lossTolerant;
+	private final AtomicReference<ScheduledState> scheduledState;
+	private final AtomicReference<String> comments;
+	private final AtomicReference<String> name;
+	private final AtomicReference<Position> position;
+	private final AtomicReference<String> annotationData;
+	private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
+	private final AtomicReference<String> yieldPeriod;
+	private final AtomicReference<String> penalizationPeriod;
+	private final AtomicReference<Map<String, String>> style;
+	private final AtomicInteger concurrentTaskCount;
+	private final AtomicLong yieldExpiration;
+	private final AtomicLong schedulingNanos;
+	private final boolean triggerWhenEmpty;
+	private final boolean sideEffectFree;
+	private final boolean triggeredSerially;
+	private final boolean triggerWhenAnyDestinationAvailable;
+	private final boolean eventDrivenSupported;
+	private final boolean batchSupported;
+	private final Requirement inputRequirement;
+	private final ValidationContextFactory validationContextFactory;
+	private final ProcessScheduler processScheduler;
+	private long runNanos = 0L;
+
+	private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
+
+	@SuppressWarnings("deprecation")
+	public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
+		final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
+		super(processor, uuid, validationContextFactory, controllerServiceProvider);
+
+		this.processor = processor;
+		identifier = new AtomicReference<>(uuid);
+		destinations = new HashMap<>();
+		connections = new HashMap<>();
+		incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
+		scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
+		rwLock = new ReentrantReadWriteLock(false);
+		readLock = rwLock.readLock();
+		writeLock = rwLock.writeLock();
+		lossTolerant = new AtomicBoolean(false);
+		final Set<Relationship> emptySetOfRelationships = new HashSet<>();
+		undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
+		comments = new AtomicReference<>("");
+		name = new AtomicReference<>(processor.getClass().getSimpleName());
+		schedulingPeriod = new AtomicReference<>("0 sec");
+		schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
+		yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
+		yieldExpiration = new AtomicLong(0L);
+		concurrentTaskCount = new AtomicInteger(1);
+		position = new AtomicReference<>(new Position(0D, 0D));
+		style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
+		this.processGroup = new AtomicReference<>();
+		processScheduler = scheduler;
+		annotationData = new AtomicReference<>();
+		isolated = new AtomicBoolean(false);
+		penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
+
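+		// Check for both the current annotations and their deprecated counterparts so that older processors keep working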
+		final Class<?> procClass = processor.getClass();
+		triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
+		sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
+		batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
+		triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
+		triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
+			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
+		this.validationContextFactory = validationContextFactory;
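+		// Event-Driven scheduling is offered only if the processor requests it and is neither triggered serially nor triggered when empty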
+		eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
+			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
+
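+		// Default to INPUT_ALLOWED when no @InputRequirement annotation is present, since existing processors do not declare one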
+		final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
+		if (inputRequirementPresent) {
+			inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
+		} else {
+			inputRequirement = Requirement.INPUT_ALLOWED;
+		}
+
+		schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
+	}
+
+	/**
+	 * @return comments about this specific processor instance
+	 */
+	@Override
+	public String getComments() {
+		return comments.get();
+	}
+
+	/**
+	 * Provides an opportunity to retain information about this particular processor instance
+	 *
+	 * @param comments new comments
+	 */
+	@Override
+	public void setComments(final String comments) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.comments.set(comments);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public ScheduledState getScheduledState() {
+		return scheduledState.get();
+	}
+
+	@Override
+	public Position getPosition() {
+		return position.get();
+	}
+
+	@Override
+	public void setPosition(Position position) {
+		this.position.set(position);
+	}
+
+	@Override
+	public Map<String, String> getStyle() {
+		return style.get();
+	}
+
+	@Override
+	public void setStyle(final Map<String, String> style) {
+		if (style != null) {
+			this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
+		}
+	}
+
+	@Override
+	public String getIdentifier() {
+		return identifier.get();
+	}
+
+	/**
+	 * @return true if flow file content generated by this processor is considered loss tolerant; false otherwise
+	 */
+	@Override
+	public boolean isLossTolerant() {
+		return lossTolerant.get();
+	}
+
+	@Override
+	public boolean isIsolated() {
+		return isolated.get();
+	}
+
+	/**
+	 * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isTriggerWhenEmpty() {
+		return triggerWhenEmpty;
+	}
+
+	/**
+	 * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isSideEffectFree() {
+		return sideEffectFree;
+	}
+
+	@Override
+	public boolean isHighThroughputSupported() {
+		return batchSupported;
+	}
+
+	/**
+	 * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isTriggerWhenAnyDestinationAvailable() {
+		return triggerWhenAnyDestinationAvailable;
+	}
+
+	/**
+	 * Indicates whether flow file content generated by this processor must be persisted
+	 *
+	 * @param lossTolerant true if the content generated by this processor may be lost; false if it must be persisted
+	 */
+	@Override
+	public void setLossTolerant(final boolean lossTolerant) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.lossTolerant.set(lossTolerant);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Indicates whether the processor runs on only the primary node.
+	 *
+	 * @param isolated true if the processor should run only on the primary node; false otherwise
+	 */
+	public void setIsolated(final boolean isolated) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.isolated.set(isolated);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isAutoTerminated(final Relationship relationship) {
+		final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
+		if (terminatable == null) {
+			return false;
+		}
+		return terminatable.contains(relationship);
+	}
+
+	@Override
+	public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+
+			for (final Relationship rel : terminate) {
+				if (!getConnections(rel).isEmpty()) {
+					throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
+				}
+			}
+			undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @return an unmodifiable Set that contains all of the Relationship objects that are configured to be auto-terminated
+	 */
+	@Override
+	public Set<Relationship> getAutoTerminatedRelationships() {
+		Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
+		if (relationships == null) {
+			relationships = new HashSet<>();
+		}
+		return Collections.unmodifiableSet(relationships);
+	}
+
+	@Override
+	public String getName() {
+		return name.get();
+	}
+
+	/**
+	 * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
+	 */
+	@SuppressWarnings("deprecation")
+	public String getProcessorDescription() {
+		CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
+		String description = null;
+		if (capDesc != null) {
+			description = capDesc.value();
+		} else {
+			final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
+				= processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
+			if (deprecatedCapDesc != null) {
+				description = deprecatedCapDesc.value();
+			}
+		}
+
+		return description;
+	}
+
+	@Override
+	public void setName(final String name) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.name.set(name);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @param timeUnit determines the unit of time to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
+	 * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
+	 */
+	@Override
+	public long getSchedulingPeriod(final TimeUnit timeUnit) {
+		return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
+	}
+
+	@Override
+	public boolean isEventDrivenSupported() {
+		readLock.lock();
+		try {
+			return this.eventDrivenSupported;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	/**
+	 * Updates the Scheduling Strategy used for this Processor
+	 *
+	 * @param schedulingStrategy strategy
+	 *
+	 * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
+	 */
+	@Override
+	public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
+		writeLock.lock();
+		try {
+			if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
+				// not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
+				// it no longer supports EventDriven mode, we don't want the app to fail to start up if it was already in Event-Driven
+				// Mode. Instead, we will simply leave it in Timer-Driven mode
+				return;
+			}
+
+			this.schedulingStrategy = schedulingStrategy;
+			setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @return the currently configured scheduling strategy
+	 */
+	@Override
+	public SchedulingStrategy getSchedulingStrategy() {
+		readLock.lock();
+		try {
+			return this.schedulingStrategy;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public String getSchedulingPeriod() {
+		return schedulingPeriod.get();
+	}
+
+	@Override
+	public void setScheduldingPeriod(final String schedulingPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+
+			switch (schedulingStrategy) {
+				case CRON_DRIVEN: {
+					try {
+						new CronExpression(schedulingPeriod);
+					} catch (final Exception e) {
+						throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
+					}
+				}
+				break;
+				case PRIMARY_NODE_ONLY:
+				case TIMER_DRIVEN: {
+					final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
+					if (schedulingNanos < 0) {
+						throw new IllegalArgumentException("Scheduling Period must be positive");
+					}
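+					// clamp to the minimum supported scheduling period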
+					this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
+				}
+				break;
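+				// Event-Driven processors have no scheduling period, so nothing is stored for them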
+				case EVENT_DRIVEN:
+				default:
+					return;
+			}
+
+			this.schedulingPeriod.set(schedulingPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public long getRunDuration(final TimeUnit timeUnit) {
+		readLock.lock();
+		try {
+			return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void setRunDuration(final long duration, final TimeUnit timeUnit) {
+		writeLock.lock();
+		try {
+			if (duration < 0) {
+				throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
+			}
+
+			this.runNanos = timeUnit.toNanos(duration);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public long getYieldPeriod(final TimeUnit timeUnit) {
+		return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+	}
+
+	@Override
+	public String getYieldPeriod() {
+		return yieldPeriod.get();
+	}
+
+	@Override
+	public void setYieldPeriod(final String yieldPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
+			if (yieldMillis < 0) {
+				throw new IllegalArgumentException("Yield duration must be positive");
+			}
+			this.yieldPeriod.set(yieldPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and
+	 * {@link #setYieldPeriod(String)} methods.
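+	 * <p>
+	 * For example, a processor that cannot make progress because a remote system is unavailable may yield so that
+	 * the framework does not immediately reschedule it, freeing its threads for other work.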
+	 */
+	@Override
+	public void yield() {
+		final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
+		yield(yieldMillis, TimeUnit.MILLISECONDS);
+
+		final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
+		LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
+	}
+
+	@Override
+	public void yield(final long period, final TimeUnit timeUnit) {
+		final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
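+		// never shorten an existing yield: keep whichever expiration is later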
+		yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
+
+		processScheduler.yield(this);
+	}
+
+	/**
+	 * @return the time, in milliseconds since the Epoch, at which this processor is to once again be scheduled.
+	 */
+	@Override
+	public long getYieldExpiration() {
+		return yieldExpiration.get();
+	}
+
+	@Override
+	public long getPenalizationPeriod(final TimeUnit timeUnit) {
+		return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+	}
+
+	@Override
+	public String getPenalizationPeriod() {
+		return penalizationPeriod.get();
+	}
+
+	@Override
+	public void setPenalizationPeriod(final String penalizationPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
+			if (penalizationMillis < 0) {
+				throw new IllegalArgumentException("Penalization duration must be positive");
+			}
+			this.penalizationPeriod.set(penalizationPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Sets the number of concurrent tasks that may be running for this processor.
+	 *
+	 * @param taskCount the maximum number of concurrent tasks this processor may have running
+	 * @throws IllegalArgumentException if the given value is less than 1
+	 */
+	@Override
+	public void setMaxConcurrentTasks(final int taskCount) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
+				throw new IllegalArgumentException();
+			}
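+			// processors annotated @TriggerSerially are pinned to a single concurrent task, so the request is ignored for them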
+			if (!triggeredSerially) {
+				concurrentTaskCount.set(taskCount);
+			}
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isTriggeredSerially() {
+		return triggeredSerially;
+	}
+
+	/**
+	 * @return the number of tasks that may execute concurrently for this processor
+	 */
+	@Override
+	public int getMaxConcurrentTasks() {
+		return concurrentTaskCount.get();
+	}
+
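+	// The bulletin level is stored on the component's LogRepository rather than on this node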
+	@Override
+	public LogLevel getBulletinLevel() {
+		return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
+	}
+
+	@Override
+	public void setBulletinLevel(final LogLevel level) {
+		LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
+	}
+
+	@Override
+	public Set<Connection> getConnections() {
+		final Set<Connection> allConnections = new HashSet<>();
+		readLock.lock();
+		try {
+			for (final Set<Connection> connectionSet : connections.values()) {
+				allConnections.addAll(connectionSet);
+			}
+		} finally {
+			readLock.unlock();
+		}
+
+		return allConnections;
+	}
+
+	@Override
+	public List<Connection> getIncomingConnections() {
+		return incomingConnectionsRef.get();
+	}
+
+	@Override
+	public Set<Connection> getConnections(final Relationship relationship) {
+		final Set<Connection> applicableConnections;
+		readLock.lock();
+		try {
+			applicableConnections = connections.get(relationship);
+		} finally {
+			readLock.unlock();
+		}
+		return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
+	}
+
+	@Override
+	public void addConnection(final Connection connection) {
+		Objects.requireNonNull(connection, "connection cannot be null");
+
+		if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
+			throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
+		}
+
+		writeLock.lock();
+		try {
+			List<Connection> updatedIncoming = null;
+			if (connection.getDestination().equals(this)) {
+				// don't add the connection twice. This may occur if we have a self-loop because we will be told
+				// to add the connection once because we are the source and again because we are the destination.
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				updatedIncoming = new ArrayList<>(incomingConnections);
+				if (!updatedIncoming.contains(connection)) {
+					updatedIncoming.add(connection);
+				}
+			}
+
+			if (connection.getSource().equals(this)) {
+				// don't add the connection twice. This may occur if we have a self-loop because we will be told
+				// to add the connection once because we are the source and again because we are the destination.
+				if (!destinations.containsKey(connection)) {
+					for (final Relationship relationship : connection.getRelationships()) {
+						final Relationship rel = getRelationship(relationship.getName());
+						Set<Connection> set = connections.get(rel);
+						if (set == null) {
+							set = new HashSet<>();
+							connections.put(rel, set);
+						}
+
+						set.add(connection);
+
+						destinations.put(connection, connection.getDestination());
+					}
+
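+					// once a Connection is defined for a Relationship, that Relationship can no longer be auto-terminated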
+					final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+					if (autoTerminated != null) {
+						autoTerminated.removeAll(connection.getRelationships());
+						this.undefinedRelationshipsToTerminate.set(autoTerminated);
+					}
+				}
+			}
+
+			if (updatedIncoming != null) {
+				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+			}
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean hasIncomingConnection() {
+		return !incomingConnectionsRef.get().isEmpty();
+	}
+
+	@Override
+	public void updateConnection(final Connection connection) throws IllegalStateException {
+		if (requireNonNull(connection).getSource().equals(this)) {
+			writeLock.lock();
+			try {
+				//
+				// update any relationships
+				//
+				// first check if any relationships were removed.
+				final List<Relationship> existingRelationships = new ArrayList<>();
+				for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
+					if (entry.getValue().contains(connection)) {
+						existingRelationships.add(entry.getKey());
+					}
+				}
+
+				for (final Relationship rel : connection.getRelationships()) {
+					if (!existingRelationships.contains(rel)) {
+						// relationship was removed. Check if this is legal.
+						final Set<Connection> connectionsForRelationship = getConnections(rel);
+						if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
+							// if we are running and we do not terminate undefined relationships and this is the only
+							// connection that defines the given relationship, and that relationship is required,
+							// then it is not legal to remove this relationship from this connection.
+							throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
+								+ this + ", which is currently running");
+						}
+					}
+				}
+
+				// remove the connection from any set that currently contains it
+				for (final Set<Connection> list : connections.values()) {
+					list.remove(connection);
+				}
+
+				// add the connection in for all relationships listed.
+				for (final Relationship rel : connection.getRelationships()) {
+					Set<Connection> set = connections.get(rel);
+					if (set == null) {
+						set = new HashSet<>();
+						connections.put(rel, set);
+					}
+					set.add(connection);
+				}
+
+				// update to the new destination
+				destinations.put(connection, connection.getDestination());
+
+				final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+				if (autoTerminated != null) {
+					autoTerminated.removeAll(connection.getRelationships());
+					this.undefinedRelationshipsToTerminate.set(autoTerminated);
+				}
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (connection.getDestination().equals(this)) {
+			writeLock.lock();
+			try {
+				// update our incoming connections -- we can just remove & re-add the connection to
+				// update the list.
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+				updatedIncoming.remove(connection);
+				updatedIncoming.add(connection);
+				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+			} finally {
+				writeLock.unlock();
+			}
+		}
+	}
+
+	@Override
+	public void removeConnection(final Connection connection) {
+		boolean connectionRemoved = false;
+
+		if (requireNonNull(connection).getSource().equals(this)) {
+			for (final Relationship relationship : connection.getRelationships()) {
+				final Set<Connection> connectionsForRelationship = getConnections(relationship);
+				if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
+					throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
+				}
+			}
+
+			writeLock.lock();
+			try {
+				for (final Set<Connection> connectionList : this.connections.values()) {
+					connectionList.remove(connection);
+				}
+
+				connectionRemoved = (destinations.remove(connection) != null);
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (connection.getDestination().equals(this)) {
+			writeLock.lock();
+			try {
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				if (incomingConnections.contains(connection)) {
+					final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+					updatedIncoming.remove(connection);
+					incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+					return;
+				}
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (!connectionRemoved) {
+			throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
+		}
+	}
+
+	/**
+	 * @param relationshipName the name of the relationship
+	 * @return the relationship with the given name for this node's processor, or a new relationship built for that name if the processor does not define one
+	 */
+	@Override
+	public Relationship getRelationship(final String relationshipName) {
+		final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
+		Relationship returnRel = specRel;
+
+		final Set<Relationship> relationships;
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			relationships = processor.getRelationships();
+		}
+
+		for (final Relationship rel : relationships) {
+			if (rel.equals(specRel)) {
+				returnRel = rel;
+				break;
+			}
+		}
+		return returnRel;
+	}
+
+	@Override
+	public Processor getProcessor() {
+		return this.processor;
+	}
+
+	/**
+	 * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
+	 */
+	public Set<Connectable> getDestinations() {
+		final Set<Connectable> nonSelfDestinations = new HashSet<>();
+		readLock.lock();
+		try {
+			for (final Connectable connectable : destinations.values()) {
+				if (connectable != this) {
+					nonSelfDestinations.add(connectable);
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+		return nonSelfDestinations;
+	}
+
+	public Set<Connectable> getDestinations(final Relationship relationship) {
+		readLock.lock();
+		try {
+			final Set<Connectable> destinationSet = new HashSet<>();
+			final Set<Connection> relationshipConnections = connections.get(relationship);
+			if (relationshipConnections != null) {
+				for (final Connection connection : relationshipConnections) {
+					destinationSet.add(destinations.get(connection));
+				}
+			}
+			return destinationSet;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	public Set<Relationship> getUndefinedRelationships() {
+		final Set<Relationship> undefined = new HashSet<>();
+		readLock.lock();
+		try {
+			final Set<Relationship> relationships;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				relationships = processor.getRelationships();
+			}
+
+			if (relationships == null) {
+				return undefined;
+			}
+			for (final Relationship relation : relationships) {
+				final Set<Connection> connectionSet = this.connections.get(relation);
+				if (connectionSet == null || connectionSet.isEmpty()) {
+					undefined.add(relation);
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+		return undefined;
+	}
+
+	/**
+	 * Determines if the given node is a destination for this node
+	 *
+	 * @param node node
+	 * @return true if is a direct destination node; false otherwise
+	 */
+	boolean isRelated(final ProcessorNode node) {
+		readLock.lock();
+		try {
+			return this.destinations.containsValue(node);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isRunning() {
+		readLock.lock();
+		try {
+			return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public int getActiveThreadCount() {
+		readLock.lock();
+		try {
+			return processScheduler.getActiveThreadCount(this);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isValid() {
+		readLock.lock();
+		try {
+			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+			final Collection<ValidationResult> validationResults;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				validationResults = getProcessor().validate(validationContext);
+			}
+
+			for (final ValidationResult result : validationResults) {
+				if (!result.isValid()) {
+					return false;
+				}
+			}
+
+			for (final Relationship undef : getUndefinedRelationships()) {
+				if (!isAutoTerminated(undef)) {
+					return false;
+				}
+			}
+
+			switch (getInputRequirement()) {
+				case INPUT_ALLOWED:
+					break;
+				case INPUT_FORBIDDEN: {
+					if (!getIncomingConnections().isEmpty()) {
+						return false;
+					}
+					break;
+				}
+				case INPUT_REQUIRED: {
+					if (getIncomingConnections().isEmpty()) {
+						return false;
+					}
+					break;
+				}
+			}
+		} catch (final Throwable t) {
+			return false;
+		} finally {
+			readLock.unlock();
+		}
+
+		return true;
+	}
+
+	@Override
+	public Collection<ValidationResult> getValidationErrors() {
+		final List<ValidationResult> results = new ArrayList<>();
+		readLock.lock();
+		try {
+			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+			final Collection<ValidationResult> validationResults;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				validationResults = getProcessor().validate(validationContext);
+			}
+
+			for (final ValidationResult result : validationResults) {
+				if (!result.isValid()) {
+					results.add(result);
+				}
+			}
+
+			for (final Relationship relationship : getUndefinedRelationships()) {
+				if (!isAutoTerminated(relationship)) {
+					final ValidationResult error = new ValidationResult.Builder()
+						.explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
+						.subject("Relationship " + relationship.getName())
+						.valid(false)
+						.build();
+					results.add(error);
+				}
+			}
+
+			switch (getInputRequirement()) {
+				case INPUT_ALLOWED:
+					break;
+				case INPUT_FORBIDDEN: {
+					final int incomingConnCount = getIncomingConnections().size();
+					if (incomingConnCount != 0) {
+						results.add(new ValidationResult.Builder()
+							.explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
+							.subject("Incoming Connections")
+							.valid(false)
+							.build());
+					}
+					break;
+				}
+				case INPUT_REQUIRED: {
+					if (getIncomingConnections().isEmpty()) {
+						results.add(new ValidationResult.Builder()
+							.explanation("Processor requires at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
+							.subject("Incoming Connections")
+							.valid(false)
+							.build());
+					}
+					break;
+				}
+			}
+		} catch (final Throwable t) {
+			results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
+		} finally {
+			readLock.unlock();
+		}
+		return results;
+	}
+
+	@Override
+	public Requirement getInputRequirement() {
+		return inputRequirement;
+	}
+
+	/**
+	 * Establishes node equality (based on the processor's identifier)
+	 *
+	 * @param other node
+	 * @return true if equal
+	 */
+	@Override
+	public boolean equals(final Object other) {
+		if (!(other instanceof ProcessorNode)) {
+			return false;
+		}
+		final ProcessorNode on = (ProcessorNode) other;
+		return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
+	}
+
+	@Override
+	public int hashCode() {
+		return new HashCodeBuilder(7, 67).append(identifier.get()).toHashCode();
+	}
+
+	@Override
+	public Collection<Relationship> getRelationships() {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			return getProcessor().getRelationships();
+		}
+	}
+
+	@Override
+	public String toString() {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			return getProcessor().toString();
+		}
+	}
+
+	@Override
+	public ProcessGroup getProcessGroup() {
+		return processGroup.get();
+	}
+
+	@Override
+	public void setProcessGroup(final ProcessGroup group) {
+		writeLock.lock();
+		try {
+			this.processGroup.set(group);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			processor.onTrigger(context, sessionFactory);
+		}
+	}
+
+	@Override
+	public ConnectableType getConnectableType() {
+		return ConnectableType.PROCESSOR;
+	}
+
+	@Override
+	public void setScheduledState(final ScheduledState scheduledState) {
+		this.scheduledState.set(scheduledState);
+		if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
+			yieldExpiration.set(0L);
+		}
+	}
+
+	@Override
+	public void setAnnotationData(final String data) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot set AnnotationData while processor is running");
+			}
+
+			this.annotationData.set(data);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public String getAnnotationData() {
+		return annotationData.get();
+	}
+
+	@Override
+	public Collection<ValidationResult> validate(final ValidationContext validationContext) {
+		return getValidationErrors();
+	}
+
+	@Override
+	public void verifyCanDelete() throws IllegalStateException {
+		verifyCanDelete(false);
+	}
+
+	@Override
+	public void verifyCanDelete(final boolean ignoreConnections) {
+		readLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException(this + " is running");
+			}
+
+			if (!ignoreConnections) {
+				for (final Set<Connection> connectionSet : connections.values()) {
+					for (final Connection connection : connectionSet) {
+						connection.verifyCanDelete();
+					}
+				}
+
+				for (final Connection connection : incomingConnectionsRef.get()) {
+					if (connection.getSource().equals(this)) {
+						connection.verifyCanDelete();
+					} else {
+						throw new IllegalStateException(this + " is the destination of another component");
+					}
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanStart() {
+		readLock.lock();
+		try {
+			switch (getScheduledState()) {
+				case DISABLED:
+					throw new IllegalStateException(this + " cannot be started because it is disabled");
+				case RUNNING:
+					throw new IllegalStateException(this + " cannot be started because it is already running");
+				case STOPPED:
+					break;
+			}
+			verifyNoActiveThreads();
+
+			if (!isValid()) {
+				throw new IllegalStateException(this + " is not in a valid state");
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
+		switch (getScheduledState()) {
+			case DISABLED:
+				throw new IllegalStateException(this + " cannot be started because it is disabled");
+			case RUNNING:
+				throw new IllegalStateException(this + " cannot be started because it is already running");
+			case STOPPED:
+				break;
+		}
+		verifyNoActiveThreads();
+
+		final Set<String> ids = new HashSet<>();
+		for (final ControllerServiceNode node : ignoredReferences) {
+			ids.add(node.getIdentifier());
+		}
+
+		final Collection<ValidationResult> validationResults = getValidationErrors(ids);
+		for (final ValidationResult result : validationResults) {
+			if (!result.isValid()) {
+				throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
+			}
+		}
+	}
+
+	@Override
+	public void verifyCanStop() {
+		if (getScheduledState() != ScheduledState.RUNNING) {
+			throw new IllegalStateException(this + " is not scheduled to run");
+		}
+	}
+
+	@Override
+	public void verifyCanUpdate() {
+		readLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException(this + " is not stopped");
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanEnable() {
+		readLock.lock();
+		try {
+			if (getScheduledState() != ScheduledState.DISABLED) {
+				throw new IllegalStateException(this + " is not disabled");
+			}
+
+			verifyNoActiveThreads();
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanDisable() {
+		readLock.lock();
+		try {
+			if (getScheduledState() != ScheduledState.STOPPED) {
+				throw new IllegalStateException(this + " is not stopped");
+			}
+			verifyNoActiveThreads();
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	private void verifyNoActiveThreads() throws IllegalStateException {
+		final int threadCount = processScheduler.getActiveThreadCount(this);
+		if (threadCount > 0) {
+			throw new IllegalStateException(this + " has " + threadCount + " threads still active");
+		}
+	}
+
+	@Override
+	public void verifyModifiable() throws IllegalStateException {
+		if (isRunning()) {
+			throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+		}
+	}
 }
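
For reference, the validity rule that isValid() and getValidationErrors() enforce
above reduces to a single predicate over the declared requirement and the number of
incoming connections. The sketch below is illustrative only (InputRequirementCheck
and connectionsAgree are hypothetical names, not NiFi API); it assumes only the
Requirement enum from org.apache.nifi.annotation.behavior.InputRequirement that the
imports in these diffs introduce:

    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

    // Hypothetical helper, not NiFi API: checks whether the number of incoming
    // connections agrees with a processor's declared input requirement.
    final class InputRequirementCheck {
        private InputRequirementCheck() {}

        static boolean connectionsAgree(final Requirement requirement, final int incomingConnectionCount) {
            switch (requirement) {
                case INPUT_FORBIDDEN:
                    return incomingConnectionCount == 0; // pure sources must have no inputs
                case INPUT_REQUIRED:
                    return incomingConnectionCount > 0;  // must be fed by at least one connection
                case INPUT_ALLOWED:
                default:
                    return true;                         // either configuration is acceptable
            }
        }
    }

A disagreement is surfaced as a ValidationResult, so a misconfigured processor is
reported invalid at configuration time rather than misbehaving at run time. Note also
the copy-on-write idiom used for incomingConnectionsRef: readers always see an
immutable snapshot, while writers swap in a rebuilt unmodifiable list under the
write lock.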

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java b/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
index b766878..eccff79 100644
--- a/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
+++ b/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
@@ -31,6 +31,8 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -59,6 +61,7 @@ import com.maxmind.geoip2.record.Subdivision;
 @SideEffectFree
 @SupportsBatching
 @Tags({"geo", "enrich", "ip", "maxmind"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Looks up geolocation information for an IP address and adds the geo information to FlowFile attributes. The "
         + "geo data is provided as a MaxMind database. The attribute that contains the IP address to lookup is provided by the "
         + "'IP Address Attribute' property. If the name of the attribute provided is 'X', then the the attributes added by enrichment "


[03/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
index cbcc54d..385ac73 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
@@ -23,7 +23,8 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -57,6 +58,7 @@ import org.apache.nifi.processors.hadoop.util.SequenceFileWriter;
  *
  */
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "sequence file", "create", "sequencefile"})
 @CapabilityDescription("Creates Hadoop Sequence Files from incoming flow files")
 @SeeAlso(PutHDFS.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
index 4a52fb7..aa03e73 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -44,6 +46,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "hdfs", "get", "ingest", "fetch", "source"})
 @CapabilityDescription("Retrieves a file from HDFS. The content of the incoming FlowFile is replaced by the content of the file in HDFS. "
         + "The file in HDFS is left intact without any changes being made to it.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
index de776d4..4c9deea 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
@@ -41,6 +41,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -62,6 +64,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_ALLOWED)
 @Tags({"hadoop", "HDFS", "get", "fetch", "ingest", "source", "filesystem"})
 @CapabilityDescription("Fetch files from Hadoop Distributed File System (HDFS) into FlowFiles. This Processor will delete the file from HDFS after fetching it.")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
index 151cbf2..563bda8 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
@@ -36,6 +36,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -66,6 +68,7 @@ import org.codehaus.jackson.map.ObjectMapper;
 
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"hadoop", "HDFS", "get", "list", "ingest", "source", "filesystem"})
 @CapabilityDescription("Retrieves a listing of files from HDFS. For each file that is listed in HDFS, creates a FlowFile that represents "
         + "the HDFS file so that it can be fetched in conjunction with ListHDFS. This Processor is designed to run on Primary Node only "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
index 901159b..bedf1b9 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
@@ -33,6 +33,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -59,6 +61,7 @@ import org.apache.nifi.util.StopWatch;
 /**
  * This processor copies FlowFiles to HDFS.
  */
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "HDFS", "put", "copy", "filesystem"})
 @CapabilityDescription("Write FlowFile data to Hadoop Distributed File System (HDFS)")
 @WritesAttribute(attribute = "filename", description = "The name of the file written to HDFS comes from the value of this attribute.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
index 574fb2d..3a6ac79 100644
--- a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
+++ b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
@@ -26,6 +26,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -57,6 +59,7 @@ import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
 
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"HL7", "health level 7", "healthcare", "extract", "attributes"})
 @CapabilityDescription("Extracts information from an HL7 (Health Level 7) formatted FlowFile and adds the information as FlowFile Attributes. "
         + "The attributes are named as <Segment Name> <dot> <Field Index>. If the segment is repeating, the naming will be "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
index 53e7e69..26e8bb6 100644
--- a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
+++ b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
@@ -29,6 +29,8 @@ import java.util.Set;
 import org.apache.nifi.annotation.behavior.DynamicProperties;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -63,6 +65,7 @@ import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"HL7", "healthcare", "route", "Health Level 7"})
 @DynamicProperties({
     @DynamicProperty(name = "Name of a Relationship", value = "An HL7 Query Language query",

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
index 7fe6195..b44eccd 100644
--- a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
+++ b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
@@ -16,11 +16,18 @@
  */
 package org.apache.nifi.processors.image;
 
-import com.drew.imaging.ImageMetadataReader;
-import com.drew.imaging.ImageProcessingException;
-import com.drew.metadata.Directory;
-import com.drew.metadata.Metadata;
-import com.drew.metadata.Tag;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -30,25 +37,22 @@ import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.HashMap;
+import com.drew.imaging.ImageMetadataReader;
+import com.drew.imaging.ImageProcessingException;
+import com.drew.metadata.Directory;
+import com.drew.metadata.Metadata;
+import com.drew.metadata.Tag;
 
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Exif", "Exchangeable", "image", "file", "format", "JPG", "GIF", "PNG", "BMP", "metadata","IPTC", "XMP"})
 @CapabilityDescription("Extract the image metadata from flowfiles containing images. This processor relies on this "
         + "metadata extractor library https://github.com/drewnoakes/metadata-extractor. It extracts a long list of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
index c085b5f..176561f 100644
--- a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
+++ b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
@@ -17,7 +17,27 @@
 
 package org.apache.nifi.processors.image;
 
+import java.awt.Graphics2D;
+import java.awt.Image;
+import java.awt.image.BufferedImage;
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import javax.imageio.ImageIO;
+import javax.imageio.ImageReader;
+import javax.imageio.stream.ImageInputStream;
+
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -33,25 +53,9 @@ import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
-import javax.imageio.ImageIO;
-import javax.imageio.ImageReader;
-import javax.imageio.stream.ImageInputStream;
-import java.awt.Image;
-import java.awt.Graphics2D;
-import java.awt.image.BufferedImage;
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Iterator;
-import java.util.concurrent.TimeUnit;
-
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({ "resize", "image", "jpg", "jpeg", "png", "bmp", "wbmp", "gif" })
 @CapabilityDescription("Resizes an image to user-specified dimensions. This Processor uses the image codecs registered with the "
     + "environment that NiFi is running in. By default, this includes JPEG, PNG, BMP, WBMP, and GIF images.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
index 26590df..e10977b 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
@@ -32,18 +32,13 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
-import kafka.consumer.Consumer;
-import kafka.consumer.ConsumerConfig;
-import kafka.consumer.ConsumerIterator;
-import kafka.consumer.KafkaStream;
-import kafka.javaapi.consumer.ConsumerConnector;
-import kafka.message.MessageAndMetadata;
-
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
@@ -58,7 +53,15 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import kafka.consumer.Consumer;
+import kafka.consumer.ConsumerConfig;
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+import kafka.javaapi.consumer.ConsumerConnector;
+import kafka.message.MessageAndMetadata;
+
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Fetches messages from Apache Kafka")
 @Tags({"Kafka", "Apache", "Get", "Ingest", "Ingress", "Topic", "PubSub"})
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
index d83c7bf..cff285c 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
@@ -30,10 +30,8 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-import kafka.producer.ProducerConfig;
-
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -58,9 +56,13 @@ import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.stream.io.util.NonThreadSafeCircularBuffer;
 import org.apache.nifi.util.LongHolder;
 
+import kafka.javaapi.producer.Producer;
+import kafka.producer.KeyedMessage;
+import kafka.producer.ProducerConfig;
 import scala.actors.threadpool.Arrays;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({ "Apache", "Kafka", "Put", "Send", "Message", "PubSub" })
 @CapabilityDescription("Sends the contents of a FlowFile as a message to Apache Kafka")
 public class PutKafka extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
index 6c20a8f..6f126aa 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
@@ -18,18 +18,20 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
+import static org.apache.nifi.processor.util.StandardValidators.createLongValidator;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -53,11 +55,13 @@ import org.kitesdk.data.spi.DefaultConfiguration;
 import org.kitesdk.data.spi.filesystem.CSVFileReader;
 import org.kitesdk.data.spi.filesystem.CSVProperties;
 
-import static org.apache.nifi.processor.util.StandardValidators.createLongValidator;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
 
 @Tags({"kite", "csv", "avro"})
-@CapabilityDescription(
-        "Converts CSV files to Avro according to an Avro Schema")
+@InputRequirement(Requirement.INPUT_REQUIRED)
+@CapabilityDescription("Converts CSV files to Avro according to an Avro Schema")
 public class ConvertCSVToAvro extends AbstractKiteProcessor {
 
     private static final CSVProperties DEFAULTS = new CSVProperties.Builder().build();

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
index ec1503c..af120bf 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
@@ -18,18 +18,18 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -47,9 +47,13 @@ import org.kitesdk.data.SchemaNotFoundException;
 import org.kitesdk.data.spi.DefaultConfiguration;
 import org.kitesdk.data.spi.filesystem.JSONFileReader;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 @Tags({"kite", "json", "avro"})
-@CapabilityDescription(
-        "Converts JSON files to Avro according to an Avro Schema")
+@InputRequirement(Requirement.INPUT_REQUIRED)
+@CapabilityDescription("Converts JSON files to Avro according to an Avro Schema")
 public class ConvertJSONToAvro extends AbstractKiteProcessor {
 
     private static final Relationship SUCCESS = new Relationship.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
index 7a30db1..1986f0b 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
@@ -18,16 +18,17 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileStream;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -46,6 +47,10 @@ import org.kitesdk.data.ValidationException;
 import org.kitesdk.data.View;
 import org.kitesdk.data.spi.SchemaValidationUtil;
 
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"kite", "avro", "parquet", "hadoop", "hive", "hdfs", "hbase"})
 @CapabilityDescription("Stores Avro records in a Kite dataset")
 public class StoreInKiteDataset extends AbstractKiteProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java b/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
index 8398152..5f58781 100644
--- a/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
+++ b/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
@@ -33,6 +33,8 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.MultivaluedMap;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -70,6 +72,7 @@ import com.sun.jersey.api.json.JSONConfiguration;
 import com.sun.jersey.core.util.MultivaluedMapImpl;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"yandex", "translate", "translation", "language"})
 @CapabilityDescription("Translates content and attributes from one language to another")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore b/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
new file mode 100644
index 0000000..ae3c172
--- /dev/null
+++ b/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
@@ -0,0 +1 @@
+/bin/

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
index a78b112..e41b583 100644
--- a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
+++ b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
@@ -32,6 +32,8 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -57,8 +59,8 @@ import org.apache.nifi.processor.util.StandardValidators;
 import com.twitter.hbc.ClientBuilder;
 import com.twitter.hbc.core.Client;
 import com.twitter.hbc.core.Constants;
-import com.twitter.hbc.core.endpoint.Location.Coordinate ;
 import com.twitter.hbc.core.endpoint.Location ;
+import com.twitter.hbc.core.endpoint.Location.Coordinate ;
 import com.twitter.hbc.core.endpoint.StatusesFilterEndpoint;
 import com.twitter.hbc.core.endpoint.StatusesFirehoseEndpoint;
 import com.twitter.hbc.core.endpoint.StatusesSampleEndpoint;
@@ -69,6 +71,7 @@ import com.twitter.hbc.httpclient.auth.Authentication;
 import com.twitter.hbc.httpclient.auth.OAuth1;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"twitter", "tweets", "social media", "status", "json"})
 @CapabilityDescription("Pulls status changes from Twitter's streaming API")
 @WritesAttribute(attribute = "mime.type", description = "Sets mime type to application/json")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
index ff264a1..a85aa0f 100644
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
@@ -18,7 +18,29 @@
  */
 package org.apache.nifi.processors.solr;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
 import org.apache.commons.io.IOUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnRemoved;
@@ -41,27 +63,8 @@ import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Properties;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-
 @Tags({"Apache", "Solr", "Get", "Pull"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Queries Solr and outputs the results as a FlowFile")
 public class GetSolr extends SolrProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
index 560ad34..df034c9 100644
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
@@ -18,7 +18,24 @@
  */
 package org.apache.nifi.processors.solr;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -40,22 +57,8 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.MultiMapSolrParams;
 import org.apache.solr.common.util.ContentStreamBase;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.concurrent.TimeUnit;
-
 @Tags({"Apache", "Solr", "Put", "Send"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Sends the contents of a FlowFile as a ContentStream to Solr")
 @DynamicProperty(name="A Solr request parameter name", value="A Solr request parameter value",
         description="These parameters will be passed to Solr on the request")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
index 9887e38..816b407 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
@@ -29,6 +29,8 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.codec.binary.Base64InputStream;
 import org.apache.commons.codec.binary.Base64OutputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -51,101 +53,102 @@ import org.apache.nifi.util.StopWatch;
 @SupportsBatching
 @Tags({"encode", "base64"})
 @CapabilityDescription("Encodes or decodes content to and from base64")
+@InputRequirement(Requirement.INPUT_REQUIRED)
 public class Base64EncodeContent extends AbstractProcessor {
 
-    public static final String ENCODE_MODE = "Encode";
-    public static final String DECODE_MODE = "Decode";
+	public static final String ENCODE_MODE = "Encode";
+	public static final String DECODE_MODE = "Decode";
 
-    public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
-            .name("Mode")
-            .description("Specifies whether the content should be encoded or decoded")
-            .required(true)
-            .allowableValues(ENCODE_MODE, DECODE_MODE)
-            .defaultValue(ENCODE_MODE)
-            .build();
-    public static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("Any FlowFile that is successfully encoded or decoded will be routed to success")
-            .build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder()
-            .name("failure")
-            .description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
-            .build();
+	public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
+		.name("Mode")
+		.description("Specifies whether the content should be encoded or decoded")
+		.required(true)
+		.allowableValues(ENCODE_MODE, DECODE_MODE)
+		.defaultValue(ENCODE_MODE)
+		.build();
+	public static final Relationship REL_SUCCESS = new Relationship.Builder()
+		.name("success")
+		.description("Any FlowFile that is successfully encoded or decoded will be routed to success")
+		.build();
+	public static final Relationship REL_FAILURE = new Relationship.Builder()
+		.name("failure")
+		.description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
+		.build();
 
-    private List<PropertyDescriptor> properties;
-    private Set<Relationship> relationships;
+	private List<PropertyDescriptor> properties;
+	private Set<Relationship> relationships;
 
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(MODE);
-        this.properties = Collections.unmodifiableList(properties);
+	@Override
+	protected void init(final ProcessorInitializationContext context) {
+		final List<PropertyDescriptor> properties = new ArrayList<>();
+		properties.add(MODE);
+		this.properties = Collections.unmodifiableList(properties);
 
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_FAILURE);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
+		final Set<Relationship> relationships = new HashSet<>();
+		relationships.add(REL_SUCCESS);
+		relationships.add(REL_FAILURE);
+		this.relationships = Collections.unmodifiableSet(relationships);
+	}
 
-    @Override
-    public Set<Relationship> getRelationships() {
-        return relationships;
-    }
+	@Override
+	public Set<Relationship> getRelationships() {
+		return relationships;
+	}
 
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
-    }
+	@Override
+	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+		return properties;
+	}
 
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSession session) {
-        FlowFile flowFile = session.get();
-        if (flowFile == null) {
-            return;
-        }
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSession session) {
+		FlowFile flowFile = session.get();
+		if (flowFile == null) {
+			return;
+		}
 
-        final ProcessorLog logger = getLogger();
+		final ProcessorLog logger = getLogger();
 
-        boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
-        try {
-            final StopWatch stopWatch = new StopWatch(true);
-            if (encode) {
-                flowFile = session.write(flowFile, new StreamCallback() {
-                    @Override
-                    public void process(InputStream in, OutputStream out) throws IOException {
-                        try (Base64OutputStream bos = new Base64OutputStream(out)) {
-                            int len = -1;
-                            byte[] buf = new byte[8192];
-                            while ((len = in.read(buf)) > 0) {
-                                bos.write(buf, 0, len);
-                            }
-                            bos.flush();
-                        }
-                    }
-                });
-            } else {
-                flowFile = session.write(flowFile, new StreamCallback() {
-                    @Override
-                    public void process(InputStream in, OutputStream out) throws IOException {
-                        try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
-                            int len = -1;
-                            byte[] buf = new byte[8192];
-                            while ((len = bis.read(buf)) > 0) {
-                                out.write(buf, 0, len);
-                            }
-                            out.flush();
-                        }
-                    }
-                });
-            }
+		boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
+		try {
+			final StopWatch stopWatch = new StopWatch(true);
+			if (encode) {
+				flowFile = session.write(flowFile, new StreamCallback() {
+					@Override
+					public void process(InputStream in, OutputStream out) throws IOException {
+						try (Base64OutputStream bos = new Base64OutputStream(out)) {
+							int len = -1;
+							byte[] buf = new byte[8192];
+							while ((len = in.read(buf)) > 0) {
+								bos.write(buf, 0, len);
+							}
+							bos.flush();
+						}
+					}
+				});
+			} else {
+				flowFile = session.write(flowFile, new StreamCallback() {
+					@Override
+					public void process(InputStream in, OutputStream out) throws IOException {
+						try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
+							int len = -1;
+							byte[] buf = new byte[8192];
+							while ((len = bis.read(buf)) > 0) {
+								out.write(buf, 0, len);
+							}
+							out.flush();
+						}
+					}
+				});
+			}
 
-            logger.info("Successfully {} {}", new Object[]{encode ? "encoded" : "decoded", flowFile});
-            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
-            session.transfer(flowFile, REL_SUCCESS);
-        } catch (ProcessException e) {
-            logger.error("Failed to {} {} due to {}", new Object[]{encode ? "encode" : "decode", flowFile, e});
-            session.transfer(flowFile, REL_FAILURE);
-        }
-    }
+			logger.info("Successfully {} {}", new Object[]{encode ? "encoded" : "decoded", flowFile});
+			session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+			session.transfer(flowFile, REL_SUCCESS);
+		} catch (ProcessException e) {
+			logger.error("Failed to {} {} due to {}", new Object[]{encode ? "encode" : "decode", flowFile, e});
+			session.transfer(flowFile, REL_FAILURE);
+		}
+	}
 
 }

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
index 1b9b20c..593cf44 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
@@ -29,20 +29,18 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
-import lzma.sdk.lzma.Decoder;
-import lzma.streams.LzmaInputStream;
-import lzma.streams.LzmaOutputStream;
-
 import org.apache.commons.compress.compressors.CompressorStreamFactory;
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
 import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -63,9 +61,14 @@ import org.tukaani.xz.LZMA2Options;
 import org.tukaani.xz.XZInputStream;
 import org.tukaani.xz.XZOutputStream;
 
+import lzma.sdk.lzma.Decoder;
+import lzma.streams.LzmaInputStream;
+import lzma.streams.LzmaOutputStream;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"content", "compress", "decompress", "gzip", "bzip2", "lzma", "xz-lzma2"})
 @CapabilityDescription("Compresses or decompresses the contents of FlowFiles using a user-specified compression algorithm and updates the mime.type "
     + "attribute as appropriate")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
index 2efc852..a45c211 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
@@ -31,6 +31,12 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.TriggerSerially;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -43,10 +49,6 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.timebuffer.EntityAccess;
@@ -54,344 +56,345 @@ import org.apache.nifi.util.timebuffer.TimedBuffer;
 
 @SideEffectFree
 @TriggerSerially
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"rate control", "throttle", "rate", "throughput"})
 @CapabilityDescription("Controls the rate at which data is transferred to follow-on processors.")
 public class ControlRate extends AbstractProcessor {
 
-    public static final String DATA_RATE = "data rate";
-    public static final String FLOWFILE_RATE = "flowfile count";
-    public static final String ATTRIBUTE_RATE = "attribute value";
-
-    public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
-            .name("Rate Control Criteria")
-            .description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
-            .required(true)
-            .allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
-            .defaultValue(DATA_RATE)
-            .build();
-    public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
-            .name("Maximum Rate")
-            .description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
-                    + "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
-            .required(true)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
-            .build();
-    public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-            .name("Rate Controlled Attribute")
-            .description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
-                    + "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
-                    + "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
-            .required(false)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .expressionLanguageSupported(false)
-            .build();
-    public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
-            .name("Time Duration")
-            .description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
-            .required(true)
-            .addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
-            .defaultValue("1 min")
-            .build();
-    public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-            .name("Grouping Attribute")
-            .description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
-                    + "each value specified by the attribute with this name. Changing this value resets the rate counters.")
-            .required(false)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .expressionLanguageSupported(false)
-            .build();
-
-    public static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("All FlowFiles are transferred to this relationship")
-            .build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder()
-            .name("failure")
-            .description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
-            .build();
-
-    private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
-    private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
-
-    private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
-    private List<PropertyDescriptor> properties;
-    private Set<Relationship> relationships;
-    private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
-
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(RATE_CONTROL_CRITERIA);
-        properties.add(MAX_RATE);
-        properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
-        properties.add(TIME_PERIOD);
-        properties.add(GROUPING_ATTRIBUTE_NAME);
-        this.properties = Collections.unmodifiableList(properties);
-
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
-    }
-
-    @Override
-    public Set<Relationship> getRelationships() {
-        return relationships;
-    }
-
-    @Override
-    protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
-
-        final Validator rateValidator;
-        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-            case DATA_RATE:
-                rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
-                break;
-            case ATTRIBUTE_RATE:
-                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-                final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-                if (rateAttr == null) {
-                    validationResults.add(new ValidationResult.Builder()
-                            .subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
-                            .explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
-                            .build());
-                }
-                break;
-            case FLOWFILE_RATE:
-            default:
-                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-                break;
-        }
-
-        final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
-        if (!rateResult.isValid()) {
-            validationResults.add(rateResult);
-        }
-
-        return validationResults;
-    }
-
-    @Override
-    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
-        super.onPropertyModified(descriptor, oldValue, newValue);
-
-        if (descriptor.equals(RATE_CONTROL_CRITERIA)
-                || descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
-                || descriptor.equals(GROUPING_ATTRIBUTE_NAME)
-                || descriptor.equals(TIME_PERIOD)) {
-            // if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
-            throttleMap.clear();
-        } else if (descriptor.equals(MAX_RATE)) {
-            final long newRate;
-            if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
-                newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
-            } else {
-                newRate = Long.parseLong(newValue);
-            }
-
-            for (final Throttle throttle : throttleMap.values()) {
-                throttle.setMaxRate(newRate);
-            }
-        }
-    }
-
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
-        final long lastClearTime = lastThrottleClearTime.get();
-        final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
-        if (lastClearTime < throttleExpirationMillis) {
-            if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
-                final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
-                while (itr.hasNext()) {
-                    final Map.Entry<String, Throttle> entry = itr.next();
-                    final Throttle throttle = entry.getValue();
-                    if (throttle.tryLock()) {
-                        try {
-                            if (throttle.lastUpdateTime() < lastClearTime) {
-                                itr.remove();
-                            }
-                        } finally {
-                            throttle.unlock();
-                        }
-                    }
-                }
-            }
-        }
-
-        // TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
-        FlowFile flowFile = session.get();
-        if (flowFile == null) {
-            return;
-        }
-
-        final ProcessorLog logger = getLogger();
-        final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
-        final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-        long rateValue;
-        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-            case DATA_RATE:
-                rateValue = flowFile.getSize();
-                break;
-            case FLOWFILE_RATE:
-                rateValue = 1;
-                break;
-            case ATTRIBUTE_RATE:
-                final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
-                if (attributeValue == null) {
-                    logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[]{flowFile, rateControlAttributeName});
-                    session.transfer(flowFile, REL_FAILURE);
-                    return;
-                }
-
-                if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
-                    logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
-                            new Object[]{flowFile, rateControlAttributeName, attributeValue});
-                    session.transfer(flowFile, REL_FAILURE);
-                    return;
-                }
-                rateValue = Long.parseLong(attributeValue);
-                break;
-            default:
-                throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
-        }
-
-        final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
-        final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
-        Throttle throttle = throttleMap.get(groupName);
-        if (throttle == null) {
-            throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
-
-            final String maxRateValue = context.getProperty(MAX_RATE).getValue();
-            final long newRate;
-            if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
-                newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
-            } else {
-                newRate = Long.parseLong(maxRateValue);
-            }
-            throttle.setMaxRate(newRate);
-
-            throttleMap.put(groupName, throttle);
-        }
-
-        throttle.lock();
-        try {
-            if (throttle.tryAdd(rateValue)) {
-                logger.info("transferring {} to 'success'", new Object[]{flowFile});
-                session.transfer(flowFile, REL_SUCCESS);
-            } else {
-                flowFile = session.penalize(flowFile);
-                session.transfer(flowFile);
-            }
-        } finally {
-            throttle.unlock();
-        }
-    }
-
-    private static class TimestampedLong {
-
-        private final Long value;
-        private final long timestamp = System.currentTimeMillis();
-
-        public TimestampedLong(final Long value) {
-            this.value = value;
-        }
-
-        public Long getValue() {
-            return value;
-        }
-
-        public long getTimestamp() {
-            return timestamp;
-        }
-    }
-
-    private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
-
-        @Override
-        public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
-            if (oldValue == null && toAdd == null) {
-                return new TimestampedLong(0L);
-            } else if (oldValue == null) {
-                return toAdd;
-            } else if (toAdd == null) {
-                return oldValue;
-            }
-
-            return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
-        }
-
-        @Override
-        public TimestampedLong createNew() {
-            return new TimestampedLong(0L);
-        }
-
-        @Override
-        public long getTimestamp(TimestampedLong entity) {
-            return entity == null ? 0L : entity.getTimestamp();
-        }
-    }
-
-    private static class Throttle extends ReentrantLock {
-
-        private final AtomicLong maxRate = new AtomicLong(1L);
-        private final long timePeriodValue;
-        private final TimeUnit timePeriodUnit;
-        private final TimedBuffer<TimestampedLong> timedBuffer;
-        private final ProcessorLog logger;
-
-        private volatile long penalizationExpired;
-        private volatile long lastUpdateTime;
-
-        public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
-            this.timePeriodUnit = unit;
-            this.timePeriodValue = timePeriod;
-            this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
-            this.logger = logger;
-        }
-
-        public void setMaxRate(final long maxRate) {
-            this.maxRate.set(maxRate);
-        }
-
-        public long lastUpdateTime() {
-            return lastUpdateTime;
-        }
-
-        public boolean tryAdd(final long value) {
-            final long now = System.currentTimeMillis();
-            if (penalizationExpired > now) {
-                return false;
-            }
-
-            final long maxRateValue = maxRate.get();
-
-            final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
-            if (sum != null && sum.getValue() >= maxRateValue) {
-                logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[]{sum.getValue(), value});
-                return false;
-            }
-
-            logger.debug("current sum for throttle is {}, so allowing rate of {} through",
-                    new Object[]{sum == null ? 0 : sum.getValue(), value});
-
-            final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
-            if (transferred > maxRateValue) {
-                final long amountOver = transferred - maxRateValue;
-                // determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
-                final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
-                final double pct = (double) amountOver / (double) maxRateValue;
-                final long penalizationPeriod = (long) (milliDuration * pct);
-                this.penalizationExpired = now + penalizationPeriod;
-                logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[]{value, penalizationPeriod});
-            }
-
-            lastUpdateTime = now;
-            return true;
-        }
-    }
+	public static final String DATA_RATE = "data rate";
+	public static final String FLOWFILE_RATE = "flowfile count";
+	public static final String ATTRIBUTE_RATE = "attribute value";
+
+	public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
+		.name("Rate Control Criteria")
+		.description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
+		.required(true)
+		.allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
+		.defaultValue(DATA_RATE)
+		.build();
+	public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
+		.name("Maximum Rate")
+		.description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
+			+ "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
+		.required(true)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
+		.build();
+	public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+		.name("Rate Controlled Attribute")
+		.description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
+			+ "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
+			+ "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
+		.required(false)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+		.expressionLanguageSupported(false)
+		.build();
+	public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
+		.name("Time Duration")
+		.description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
+		.required(true)
+		.addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
+		.defaultValue("1 min")
+		.build();
+	public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+		.name("Grouping Attribute")
+		.description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
+			+ "each value specified by the attribute with this name. Changing this value resets the rate counters.")
+		.required(false)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+		.expressionLanguageSupported(false)
+		.build();
+
+	public static final Relationship REL_SUCCESS = new Relationship.Builder()
+		.name("success")
+		.description("All FlowFiles are transferred to this relationship")
+		.build();
+	public static final Relationship REL_FAILURE = new Relationship.Builder()
+		.name("failure")
+		.description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
+		.build();
+
+	private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
+	private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
+
+	private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
+	private List<PropertyDescriptor> properties;
+	private Set<Relationship> relationships;
+	private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
+
+	@Override
+	protected void init(final ProcessorInitializationContext context) {
+		final List<PropertyDescriptor> properties = new ArrayList<>();
+		properties.add(RATE_CONTROL_CRITERIA);
+		properties.add(MAX_RATE);
+		properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
+		properties.add(TIME_PERIOD);
+		properties.add(GROUPING_ATTRIBUTE_NAME);
+		this.properties = Collections.unmodifiableList(properties);
+
+		final Set<Relationship> relationships = new HashSet<>();
+		relationships.add(REL_SUCCESS);
+		this.relationships = Collections.unmodifiableSet(relationships);
+	}
+
+	@Override
+	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+		return properties;
+	}
+
+	@Override
+	public Set<Relationship> getRelationships() {
+		return relationships;
+	}
+
+	@Override
+	protected Collection<ValidationResult> customValidate(final ValidationContext context) {
+		final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
+
+		final Validator rateValidator;
+		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+			case DATA_RATE:
+				rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
+				break;
+			case ATTRIBUTE_RATE:
+				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+				final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+				if (rateAttr == null) {
+					validationResults.add(new ValidationResult.Builder()
+						.subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
+						.explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
+						.build());
+				}
+				break;
+			case FLOWFILE_RATE:
+			default:
+				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+				break;
+		}
+
+		final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
+		if (!rateResult.isValid()) {
+			validationResults.add(rateResult);
+		}
+
+		return validationResults;
+	}
+
+	@Override
+	public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
+		super.onPropertyModified(descriptor, oldValue, newValue);
+
+		if (descriptor.equals(RATE_CONTROL_CRITERIA)
+			|| descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
+			|| descriptor.equals(GROUPING_ATTRIBUTE_NAME)
+			|| descriptor.equals(TIME_PERIOD)) {
+			// if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
+			throttleMap.clear();
+		} else if (descriptor.equals(MAX_RATE)) {
+			final long newRate;
+			if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
+				newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
+			} else {
+				newRate = Long.parseLong(newValue);
+			}
+
+			for (final Throttle throttle : throttleMap.values()) {
+				throttle.setMaxRate(newRate);
+			}
+		}
+	}
+
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
+		final long lastClearTime = lastThrottleClearTime.get();
+		final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
+		if (lastClearTime < throttleExpirationMillis) {
+			if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
+				final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
+				while (itr.hasNext()) {
+					final Map.Entry<String, Throttle> entry = itr.next();
+					final Throttle throttle = entry.getValue();
+					if (throttle.tryLock()) {
+						try {
+							if (throttle.lastUpdateTime() < lastClearTime) {
+								itr.remove();
+							}
+						} finally {
+							throttle.unlock();
+						}
+					}
+				}
+			}
+		}
+
+		// TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
+		FlowFile flowFile = session.get();
+		if (flowFile == null) {
+			return;
+		}
+
+		final ProcessorLog logger = getLogger();
+		final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
+		final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+		long rateValue;
+		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+			case DATA_RATE:
+				rateValue = flowFile.getSize();
+				break;
+			case FLOWFILE_RATE:
+				rateValue = 1;
+				break;
+			case ATTRIBUTE_RATE:
+				final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
+				if (attributeValue == null) {
+					logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[]{flowFile, rateControlAttributeName});
+					session.transfer(flowFile, REL_FAILURE);
+					return;
+				}
+
+				if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
+					logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
+						new Object[]{flowFile, rateControlAttributeName, attributeValue});
+					session.transfer(flowFile, REL_FAILURE);
+					return;
+				}
+				rateValue = Long.parseLong(attributeValue);
+				break;
+			default:
+				throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
+		}
+
+		final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
+		final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
+		Throttle throttle = throttleMap.get(groupName);
+		if (throttle == null) {
+			throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
+
+			final String maxRateValue = context.getProperty(MAX_RATE).getValue();
+			final long newRate;
+			if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
+				newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
+			} else {
+				newRate = Long.parseLong(maxRateValue);
+			}
+			throttle.setMaxRate(newRate);
+
+			throttleMap.put(groupName, throttle);
+		}
+
+		throttle.lock();
+		try {
+			if (throttle.tryAdd(rateValue)) {
+				logger.info("transferring {} to 'success'", new Object[]{flowFile});
+				session.transfer(flowFile, REL_SUCCESS);
+			} else {
+				flowFile = session.penalize(flowFile);
+				session.transfer(flowFile);
+			}
+		} finally {
+			throttle.unlock();
+		}
+	}
+
+	private static class TimestampedLong {
+
+		private final Long value;
+		private final long timestamp = System.currentTimeMillis();
+
+		public TimestampedLong(final Long value) {
+			this.value = value;
+		}
+
+		public Long getValue() {
+			return value;
+		}
+
+		public long getTimestamp() {
+			return timestamp;
+		}
+	}
+
+	private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
+
+		@Override
+		public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
+			if (oldValue == null && toAdd == null) {
+				return new TimestampedLong(0L);
+			} else if (oldValue == null) {
+				return toAdd;
+			} else if (toAdd == null) {
+				return oldValue;
+			}
+
+			return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
+		}
+
+		@Override
+		public TimestampedLong createNew() {
+			return new TimestampedLong(0L);
+		}
+
+		@Override
+		public long getTimestamp(TimestampedLong entity) {
+			return entity == null ? 0L : entity.getTimestamp();
+		}
+	}
+
+	private static class Throttle extends ReentrantLock {
+
+		private final AtomicLong maxRate = new AtomicLong(1L);
+		private final long timePeriodValue;
+		private final TimeUnit timePeriodUnit;
+		private final TimedBuffer<TimestampedLong> timedBuffer;
+		private final ProcessorLog logger;
+
+		private volatile long penalizationExpired;
+		private volatile long lastUpdateTime;
+
+		public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
+			this.timePeriodUnit = unit;
+			this.timePeriodValue = timePeriod;
+			this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
+			this.logger = logger;
+		}
+
+		public void setMaxRate(final long maxRate) {
+			this.maxRate.set(maxRate);
+		}
+
+		public long lastUpdateTime() {
+			return lastUpdateTime;
+		}
+
+		public boolean tryAdd(final long value) {
+			final long now = System.currentTimeMillis();
+			if (penalizationExpired > now) {
+				return false;
+			}
+
+			final long maxRateValue = maxRate.get();
+
+			final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
+			if (sum != null && sum.getValue() >= maxRateValue) {
+				logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[]{sum.getValue(), value});
+				return false;
+			}
+
+			logger.debug("current sum for throttle is {}, so allowing rate of {} through",
+				new Object[]{sum == null ? 0 : sum.getValue(), value});
+
+			final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
+			if (transferred > maxRateValue) {
+				final long amountOver = transferred - maxRateValue;
+				// determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
+				final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
+				final double pct = (double) amountOver / (double) maxRateValue;
+				final long penalizationPeriod = (long) (milliDuration * pct);
+				this.penalizationExpired = now + penalizationPeriod;
+				logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[]{value, penalizationPeriod});
+			}
+
+			lastUpdateTime = now;
+			return true;
+		}
+	}
 }
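
For intuition on the penalization arithmetic in Throttle.tryAdd above, here is a short worked example with assumed numbers (not taken from the commit): suppose the time period is 60 seconds and the maximum rate is 1000 bytes.

    // Assumed values, for illustration only:
    // milliDuration = 60000 ms, maxRateValue = 1000 bytes
    // A FlowFile of 1500 bytes is admitted, so transferred = 1500:
    //   amountOver         = 1500 - 1000      -> 500
    //   pct                = 500 / 1000.0     -> 0.5
    //   penalizationPeriod = 60000 * 0.5      -> 30000 ms
    // The throttle therefore admits this FlowFile but rejects further data for 30 seconds.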

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
index a0a1364..7a99a59 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
@@ -33,8 +33,10 @@ import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.StreamCallback;
@@ -76,6 +78,7 @@ import java.util.concurrent.TimeUnit;
  */
 @EventDriven
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @SupportsBatching
 @Tags({"text", "convert", "characterset", "character set"})
 @CapabilityDescription("Converts a FlowFile's content from one character set to another")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
index 7eda593..9591960 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
@@ -34,10 +34,12 @@ import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -64,6 +66,7 @@ import org.codehaus.jackson.node.JsonNodeFactory;
 @SideEffectFree
 @SupportsBatching
 @SeeAlso(PutSQL.class)
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"json", "sql", "database", "rdbms", "insert", "update", "relational", "flat"})
 @CapabilityDescription("Converts a JSON-formatted FlowFile into an UPDATE or INSERT SQL statement. The incoming FlowFile is expected to be "
         + "\"flat\" JSON message, meaning that it consists of a single JSON element and each field maps to a simple type. If a field maps to "


[13/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/4afd8f88
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/4afd8f88
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/4afd8f88

Branch: refs/heads/master
Commit: 4afd8f88f8a34cf87f2a06221667166a54c99a15
Parents: 31fba6b
Author: Mark Payne <ma...@hotmail.com>
Authored: Fri Sep 25 11:39:28 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:26:14 2015 -0400

----------------------------------------------------------------------
 .../annotation/behavior/InputRequirement.java   |   51 +
 .../src/main/asciidoc/developer-guide.adoc      |   11 +
 .../nifi/processors/avro/ConvertAvroToJSON.java |    3 +
 .../processors/avro/ExtractAvroMetadata.java    |   29 +-
 .../apache/nifi/processors/avro/SplitAvro.java  |   27 +-
 .../nifi/processors/aws/s3/FetchS3Object.java   |    3 +
 .../nifi/processors/aws/s3/PutS3Object.java     |    6 +-
 .../apache/nifi/processors/aws/sns/PutSNS.java  |    3 +
 .../nifi/processors/aws/sqs/DeleteSQS.java      |    3 +
 .../apache/nifi/processors/aws/sqs/GetSQS.java  |    5 +-
 .../apache/nifi/processors/aws/sqs/PutSQS.java  |    5 +-
 .../nifi/processors/flume/ExecuteFlumeSink.java |   14 +-
 .../processors/flume/ExecuteFlumeSource.java    |   14 +-
 .../apache/nifi/controller/ProcessorNode.java   |   89 +-
 .../nifi/controller/StandardProcessorNode.java  | 2440 +++++++++---------
 .../org/apache/nifi/processors/GeoEnrichIP.java |    3 +
 .../hadoop/CreateHadoopSequenceFile.java        |    4 +-
 .../nifi/processors/hadoop/FetchHDFS.java       |    3 +
 .../apache/nifi/processors/hadoop/GetHDFS.java  |    3 +
 .../apache/nifi/processors/hadoop/ListHDFS.java |    3 +
 .../apache/nifi/processors/hadoop/PutHDFS.java  |    3 +
 .../processors/hl7/ExtractHL7Attributes.java    |    3 +
 .../apache/nifi/processors/hl7/RouteHL7.java    |    3 +
 .../processors/image/ExtractImageMetadata.java  |   36 +-
 .../nifi/processors/image/ResizeImage.java      |   38 +-
 .../apache/nifi/processors/kafka/GetKafka.java  |   21 +-
 .../apache/nifi/processors/kafka/PutKafka.java  |   10 +-
 .../nifi/processors/kite/ConvertCSVToAvro.java  |   16 +-
 .../nifi/processors/kite/ConvertJSONToAvro.java |   14 +-
 .../processors/kite/StoreInKiteDataset.java     |    9 +-
 .../nifi/processors/yandex/YandexTranslate.java |    3 +
 .../nifi-pcap-processors/.gitignore             |    1 +
 .../nifi/processors/twitter/GetTwitter.java     |    5 +-
 .../apache/nifi/processors/solr/GetSolr.java    |   43 +-
 .../processors/solr/PutSolrContentStream.java   |   33 +-
 .../standard/Base64EncodeContent.java           |  171 +-
 .../processors/standard/CompressContent.java    |   15 +-
 .../nifi/processors/standard/ControlRate.java   |  683 ++---
 .../standard/ConvertCharacterSet.java           |    3 +
 .../processors/standard/ConvertJSONToSQL.java   |    3 +
 .../processors/standard/DetectDuplicate.java    |    3 +
 .../processors/standard/DistributeLoad.java     |    3 +
 .../processors/standard/DuplicateFlowFile.java  |    3 +
 .../nifi/processors/standard/EncodeContent.java |   15 +-
 .../processors/standard/EncryptContent.java     |    3 +
 .../processors/standard/EvaluateJsonPath.java   |   38 +-
 .../nifi/processors/standard/EvaluateXPath.java |   29 +-
 .../processors/standard/EvaluateXQuery.java     |   25 +-
 .../processors/standard/ExecuteProcess.java     |    3 +
 .../nifi/processors/standard/ExecuteSQL.java    |    3 +
 .../standard/ExecuteStreamCommand.java          |    7 +-
 .../nifi/processors/standard/ExtractText.java   |    3 +
 .../processors/standard/GenerateFlowFile.java   |   11 +-
 .../apache/nifi/processors/standard/GetFTP.java |   13 +-
 .../nifi/processors/standard/GetFile.java       |    7 +-
 .../nifi/processors/standard/GetHTTP.java       |    3 +
 .../nifi/processors/standard/GetJMSQueue.java   |    3 +
 .../nifi/processors/standard/GetJMSTopic.java   |    3 +
 .../nifi/processors/standard/GetSFTP.java       |    7 +-
 .../processors/standard/HandleHttpRequest.java  |    7 +-
 .../processors/standard/HandleHttpResponse.java |    5 +-
 .../nifi/processors/standard/HashAttribute.java |    5 +-
 .../nifi/processors/standard/HashContent.java   |    5 +-
 .../processors/standard/IdentifyMimeType.java   |    5 +-
 .../nifi/processors/standard/InvokeHTTP.java    |    3 +
 .../nifi/processors/standard/ListenHTTP.java    |   16 +-
 .../nifi/processors/standard/ListenUDP.java     |   18 +-
 .../nifi/processors/standard/LogAttribute.java  |   16 +-
 .../nifi/processors/standard/MergeContent.java  |   11 +-
 .../nifi/processors/standard/ModifyBytes.java   |   14 +-
 .../processors/standard/MonitorActivity.java    |   31 +-
 .../nifi/processors/standard/PostHTTP.java      |    3 +
 .../nifi/processors/standard/PutEmail.java      |    3 +
 .../apache/nifi/processors/standard/PutFTP.java |    3 +
 .../nifi/processors/standard/PutFile.java       |    3 +
 .../apache/nifi/processors/standard/PutJMS.java |    5 +-
 .../nifi/processors/standard/PutSFTP.java       |    3 +
 .../apache/nifi/processors/standard/PutSQL.java |    3 +
 .../nifi/processors/standard/ReplaceText.java   |   54 +-
 .../standard/ReplaceTextWithMapping.java        |   18 +-
 .../processors/standard/RouteOnAttribute.java   |    3 +
 .../processors/standard/RouteOnContent.java     |   19 +-
 .../nifi/processors/standard/ScanAttribute.java |   19 +-
 .../nifi/processors/standard/ScanContent.java   |    5 +-
 .../processors/standard/SegmentContent.java     |    7 +-
 .../nifi/processors/standard/SplitContent.java  |    7 +-
 .../nifi/processors/standard/SplitJson.java     |   32 +-
 .../nifi/processors/standard/SplitText.java     |   53 +-
 .../nifi/processors/standard/SplitXml.java      |   18 +-
 .../nifi/processors/standard/TransformXml.java  |    3 +
 .../nifi/processors/standard/UnpackContent.java |    9 +-
 .../nifi/processors/standard/ValidateXml.java   |   16 +-
 .../processors/attributes/UpdateAttribute.java  |    9 +-
 93 files changed, 2418 insertions(+), 2027 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
----------------------------------------------------------------------
diff --git a/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
new file mode 100644
index 0000000..97e6b88
--- /dev/null
+++ b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
@@ -0,0 +1,51 @@
+package org.apache.nifi.annotation.behavior;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * <p>
+ * Annotation that a Processor implementation can use to indicate whether it accepts, requires, or
+ * forbids input from other Processors. The framework uses this information to mark a Processor as
+ * invalid when it is missing required input or has input that will be ignored. The NiFi UI also
+ * uses this information to prevent users from creating connections that do not make sense for the
+ * destination Processor.
+ * </p>
+ */
+@Documented
+@Target({ElementType.TYPE})
+@Retention(RetentionPolicy.RUNTIME)
+@Inherited
+public @interface InputRequirement {
+	Requirement value();
+	
+	public static enum Requirement {
+		/**
+		 * This value is used to indicate that the Processor requires input from other Processors
+		 * in order to run. As a result, the Processor will not be valid if it does not have any
+		 * incoming connections.
+		 */
+		INPUT_REQUIRED,
+		
+		/**
+		 * This value is used to indicate that the Processor will consume data from an incoming
+		 * connection but does not require an incoming connection in order to perform its task.
+		 * If the {@link InputRequirement} annotation is not present, this is the default value
+		 * that is used.
+		 */
+		INPUT_ALLOWED,
+		
+		/**
+		 * This value is used to indicate that the Processor is a "Source Processor" and does
+		 * not accept incoming connections. Because the Processor does not pull FlowFiles from
+		 * an incoming connection, it can be confusing for users who create incoming connections
+		 * to the Processor. This value makes clear that incoming connections will not be used,
+		 * and it prevents the user from even creating such a connection.
+		 */
+		INPUT_FORBIDDEN;
+	}
+}
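
For illustration, here is a minimal sketch (not part of this commit) of a source-style processor using the new annotation; the class name ExampleSourceProcessor and its single "success" relationship are invented for the example:

    import java.util.Collections;
    import java.util.Set;

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.annotation.documentation.CapabilityDescription;
    import org.apache.nifi.annotation.documentation.Tags;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;

    @Tags({"example", "source"})
    @InputRequirement(Requirement.INPUT_FORBIDDEN) // source processor: no incoming connections
    @CapabilityDescription("Example source processor that generates FlowFiles rather than consuming them.")
    public class ExampleSourceProcessor extends AbstractProcessor {

        static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Generated FlowFiles are routed here")
            .build();

        @Override
        public Set<Relationship> getRelationships() {
            return Collections.singleton(REL_SUCCESS);
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) {
            // Create a FlowFile instead of pulling one from an input queue; with
            // INPUT_FORBIDDEN the UI prevents incoming connections entirely.
            final FlowFile flowFile = session.create();
            session.transfer(flowFile, REL_SUCCESS);
        }
    }

Because the annotation is marked @Inherited, a subclass of such a processor picks up the same requirement unless it declares its own.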

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-docs/src/main/asciidoc/developer-guide.adoc
----------------------------------------------------------------------
diff --git a/nifi-docs/src/main/asciidoc/developer-guide.adoc b/nifi-docs/src/main/asciidoc/developer-guide.adoc
index f9950d5..28df5c2 100644
--- a/nifi-docs/src/main/asciidoc/developer-guide.adoc
+++ b/nifi-docs/src/main/asciidoc/developer-guide.adoc
@@ -1633,6 +1633,17 @@ will handle your Processor:
 		not there is any data on an input queue. This is useful, for example, if the Processor needs to be triggered to run
 		periodically to time out a network connection.
 
+    - `InputRequirement`: By default, all Processors allow users to create incoming connections, but if the user does not
+        create an incoming connection, the Processor is still valid and can be scheduled to run. For Processors that are
+        expected to be used as a "Source Processor," though, this can be confusing: the user may attempt to send FlowFiles
+        to that Processor, only for the FlowFiles to queue up without being processed. Conversely, if the Processor expects
+        incoming FlowFiles but does not have an input queue, it will be scheduled to run but will perform no work, as it
+        receives no FlowFiles, which is equally confusing. As a result, we can use the `@InputRequirement` annotation and
+        provide it a value of `INPUT_REQUIRED`, `INPUT_ALLOWED`, or `INPUT_FORBIDDEN`. This tells the framework when the
+        Processor should be made invalid and whether the user should even be able to draw a Connection to the Processor.
+        For instance, if a Processor is annotated with `@InputRequirement(Requirement.INPUT_FORBIDDEN)`, then the user will
+        not even be able to create a Connection with that Processor as the destination.
+
 
 === Data Buffering
 

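As a rough sketch of the defaulting behavior described above (the helper class and method names here are invented; the equivalent null-check-and-default logic appears in StandardProcessorNode later in this series):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

    final class InputRequirementResolver {

        private InputRequirementResolver() {
        }

        // Resolve the effective Requirement for a processor class, falling back
        // to INPUT_ALLOWED when the annotation is absent.
        static Requirement resolve(final Class<?> procClass) {
            final InputRequirement annotation = procClass.getAnnotation(InputRequirement.class);
            return annotation == null ? Requirement.INPUT_ALLOWED : annotation.value();
        }
    }
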
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
index 8832a73..b214427 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
@@ -29,6 +29,8 @@ import org.apache.avro.file.DataFileStream;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericRecord;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -46,6 +48,7 @@ import org.apache.nifi.processor.io.StreamCallback;
 @SideEffectFree
 @SupportsBatching
 @Tags({ "json", "avro", "binary" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Converts a Binary Avro record into a JSON object. This processor provides a direct mapping of an Avro field to a JSON field, such "
     + "that the resulting JSON will have the same hierarchical structure as the Avro document. Note that the Avro schema information will be lost, as this "
     + "is not a translation from binary Avro to JSON formatted Avro. The output JSON is encoded the UTF-8 encoding. If an incoming FlowFile contains a stream of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
index 48aad7d..4cf5289 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
@@ -16,6 +16,19 @@
  */
 package org.apache.nifi.processors.avro;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.SchemaNormalization;
 import org.apache.avro.file.DataFileStream;
@@ -23,6 +36,8 @@ import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -41,22 +56,10 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-
 @SideEffectFree
 @SupportsBatching
 @Tags({ "avro", "schema", "metadata" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Extracts metadata from the header of an Avro datafile.")
 @WritesAttributes({
         @WritesAttribute(attribute = "schema.type", description = "The type of the schema (i.e. record, enum, etc.)."),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
index 3b344b5..dbf5778 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
@@ -16,6 +16,18 @@
  */
 package org.apache.nifi.processors.avro;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileConstants;
 import org.apache.avro.file.DataFileStream;
@@ -26,6 +38,8 @@ import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.Encoder;
 import org.apache.avro.io.EncoderFactory;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -45,21 +59,10 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 @SideEffectFree
 @SupportsBatching
 @Tags({ "avro", "split" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a binary encoded Avro datafile into smaller files based on the configured Output Size. The Output Strategy determines if " +
         "the smaller files will be Avro datafiles, or bare Avro records with metadata in the FlowFile attributes. The output will always be binary encoded.")
 public class SplitAvro extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
index 2406b67..131e671 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
@@ -24,6 +24,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -45,6 +47,7 @@ import com.amazonaws.services.s3.model.S3Object;
 
 @SupportsBatching
 @SeeAlso({PutS3Object.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "S3", "AWS", "Get", "Fetch"})
 @CapabilityDescription("Retrieves the contents of an S3 Object and writes it to the content of a FlowFile")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
index 24c82dd..7398c4e 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
@@ -28,6 +28,8 @@ import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -54,6 +56,7 @@ import com.amazonaws.services.s3.model.StorageClass;
 
 @SupportsBatching
 @SeeAlso({FetchS3Object.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "S3", "AWS", "Archive", "Put"})
 @CapabilityDescription("Puts FlowFiles to an Amazon S3 Bucket")
 @DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
@@ -101,7 +104,8 @@ public class PutS3Object extends AbstractS3Processor {
                 .build();
     }
 
-    public void onTrigger(final ProcessContext context, final ProcessSession session) {
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSession session) {
         FlowFile flowFile = session.get();
         if (flowFile == null) {
             return;

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
index 7d42703..e571ff4 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
@@ -23,6 +23,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -41,6 +43,7 @@ import com.amazonaws.services.sns.model.PublishRequest;
 
 @SupportsBatching
 @SeeAlso({GetSQS.class, PutSQS.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"amazon", "aws", "sns", "topic", "put", "publish", "pubsub"})
 @CapabilityDescription("Sends the content of a FlowFile as a notification to the Amazon Simple Notification Service")
 public class PutSNS extends AbstractSNSProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
index 65e020d..f88aa71 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
@@ -21,6 +21,8 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -37,6 +39,7 @@ import com.amazonaws.services.sqs.model.DeleteMessageBatchRequestEntry;
 
 @SupportsBatching
 @SeeAlso({GetSQS.class, PutSQS.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Delete"})
 @CapabilityDescription("Deletes a message from an Amazon Simple Queuing Service Queue")
 public class DeleteSQS extends AbstractSQSProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
index 7c2dd2d..a140999 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
@@ -28,6 +28,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -51,8 +53,9 @@ import com.amazonaws.services.sqs.model.ReceiveMessageRequest;
 import com.amazonaws.services.sqs.model.ReceiveMessageResult;
 
 @SupportsBatching
+@SeeAlso({ PutSQS.class, DeleteSQS.class })
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Get", "Fetch", "Poll"})
-@SeeAlso({PutSQS.class, DeleteSQS.class})
 @CapabilityDescription("Fetches messages from an Amazon Simple Queuing Service Queue")
 @WritesAttributes({
     @WritesAttribute(attribute = "hash.value", description = "The MD5 sum of the message"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
index 3961f32..0af508e 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
@@ -28,6 +28,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -45,8 +47,9 @@ import com.amazonaws.services.sqs.model.SendMessageBatchRequest;
 import com.amazonaws.services.sqs.model.SendMessageBatchRequestEntry;
 
 @SupportsBatching
+@SeeAlso({ GetSQS.class, DeleteSQS.class })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Put", "Publish"})
-@SeeAlso({GetSQS.class, DeleteSQS.class})
 @CapabilityDescription("Publishes a message to an Amazon Simple Queuing Service Queue")
 @DynamicProperty(name = "The name of a Message Attribute to add to the message", value = "The value of the Message Attribute",
         description = "Allows the user to add key/value pairs as Message Attributes by adding a property whose name will become the name of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
index 57e0278..f93b215 100644
--- a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
+++ b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
@@ -16,20 +16,19 @@
  */
 package org.apache.nifi.processors.flume;
 
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.flume.EventDeliveryException;
 import org.apache.flume.Sink;
 import org.apache.flume.conf.Configurables;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
-
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.processor.ProcessContext;
@@ -40,12 +39,17 @@ import org.apache.nifi.processor.SchedulingContext;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 /**
  * This processor runs a Flume sink
  */
+@TriggerSerially
 @Tags({"flume", "hadoop", "put", "sink"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Execute a Flume sink. Each input FlowFile is converted into a Flume Event for processing by the sink.")
-@TriggerSerially
 public class ExecuteFlumeSink extends AbstractFlumeProcessor {
 
     public static final PropertyDescriptor SINK_TYPE = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
index 600f4b1..3aad6b7 100644
--- a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
+++ b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
@@ -16,12 +16,10 @@
  */
 package org.apache.nifi.processors.flume;
 
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.flume.EventDeliveryException;
 import org.apache.flume.EventDrivenSource;
 import org.apache.flume.PollableSource;
@@ -29,12 +27,13 @@ import org.apache.flume.Source;
 import org.apache.flume.channel.ChannelProcessor;
 import org.apache.flume.conf.Configurables;
 import org.apache.flume.source.EventDrivenSourceRunner;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
-
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.processor.ProcessContext;
@@ -46,12 +45,17 @@ import org.apache.nifi.processor.SchedulingContext;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 /**
  * This processor runs a Flume source
  */
+@TriggerSerially
 @Tags({"flume", "hadoop", "get", "source"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Execute a Flume source. Each Flume Event is sent to the success relationship as a FlowFile")
-@TriggerSerially
 public class ExecuteFlumeSource extends AbstractFlumeProcessor {
 
     public static final PropertyDescriptor SOURCE_TYPE = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
index f2a83d0..2f72d0f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
@@ -20,6 +20,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.connectable.Connectable;
 import org.apache.nifi.controller.service.ControllerServiceNode;
 import org.apache.nifi.controller.service.ControllerServiceProvider;
@@ -30,70 +31,72 @@ import org.apache.nifi.scheduling.SchedulingStrategy;
 
 public abstract class ProcessorNode extends AbstractConfiguredComponent implements Connectable {
 
-    public ProcessorNode(final Processor processor, final String id,
-            final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
-        super(processor, id, validationContextFactory, serviceProvider);
-    }
+	public ProcessorNode(final Processor processor, final String id,
+		final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
+		super(processor, id, validationContextFactory, serviceProvider);
+	}
 
-    public abstract boolean isIsolated();
+	public abstract boolean isIsolated();
 
-    public abstract boolean isTriggerWhenAnyDestinationAvailable();
+	public abstract boolean isTriggerWhenAnyDestinationAvailable();
 
-    @Override
-    public abstract boolean isSideEffectFree();
+	@Override
+	public abstract boolean isSideEffectFree();
 
-    public abstract boolean isTriggeredSerially();
+	public abstract boolean isTriggeredSerially();
 
-    public abstract boolean isEventDrivenSupported();
+	public abstract boolean isEventDrivenSupported();
 
-    public abstract boolean isHighThroughputSupported();
+	public abstract boolean isHighThroughputSupported();
 
-    @Override
-    public abstract boolean isValid();
+	public abstract Requirement getInputRequirement();
 
-    public abstract void setScheduledState(ScheduledState scheduledState);
+	@Override
+	public abstract boolean isValid();
 
-    public abstract void setBulletinLevel(LogLevel bulletinLevel);
+	public abstract void setScheduledState(ScheduledState scheduledState);
 
-    public abstract LogLevel getBulletinLevel();
+	public abstract void setBulletinLevel(LogLevel bulletinLevel);
 
-    public abstract Processor getProcessor();
+	public abstract LogLevel getBulletinLevel();
 
-    public abstract void yield(long period, TimeUnit timeUnit);
+	public abstract Processor getProcessor();
 
-    public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
+	public abstract void yield(long period, TimeUnit timeUnit);
 
-    public abstract Set<Relationship> getAutoTerminatedRelationships();
+	public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
 
-    public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
+	public abstract Set<Relationship> getAutoTerminatedRelationships();
 
-    @Override
-    public abstract SchedulingStrategy getSchedulingStrategy();
+	public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
 
-    public abstract void setRunDuration(long duration, TimeUnit timeUnit);
+	@Override
+	public abstract SchedulingStrategy getSchedulingStrategy();
 
-    public abstract long getRunDuration(TimeUnit timeUnit);
+	public abstract void setRunDuration(long duration, TimeUnit timeUnit);
 
-    public abstract Map<String, String> getStyle();
+	public abstract long getRunDuration(TimeUnit timeUnit);
 
-    public abstract void setStyle(Map<String, String> style);
+	public abstract Map<String, String> getStyle();
 
-    /**
-     * @return the number of threads (concurrent tasks) currently being used by
-     * this Processor
-     */
-    public abstract int getActiveThreadCount();
+	public abstract void setStyle(Map<String, String> style);
 
-    /**
-     * Verifies that this Processor can be started if the provided set of
-     * services are enabled. This is introduced because we need to verify that
-     * all components can be started before starting any of them. In order to do
-     * that, we need to know that this component can be started if the given
-     * services are enabled, as we will then enable the given services before
-     * starting this component.
-     *
-     * @param ignoredReferences to ignore
-     */
-    public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
+	/**
+	 * @return the number of threads (concurrent tasks) currently being used by
+	 * this Processor
+	 */
+	public abstract int getActiveThreadCount();
+
+	/**
+	 * Verifies that this Processor can be started if the provided set of
+	 * services are enabled. This is introduced because we need to verify that
+	 * all components can be started before starting any of them. In order to do
+	 * that, we need to know that this component can be started if the given
+	 * services are enabled, as we will then enable the given services before
+	 * starting this component.
+	 *
+	 * @param ignoredReferences to ignore
+	 */
+	public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
 
 }
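
A hedged sketch of how a validity check might combine the new getInputRequirement() accessor with hasIncomingConnection() (the helper method name is invented; the framework's actual validation logic is not shown in this diff):

    static boolean connectionsAgreeWithRequirement(final ProcessorNode node) {
        switch (node.getInputRequirement()) {
            case INPUT_REQUIRED:
                // Invalid unless at least one incoming connection exists.
                return node.hasIncomingConnection();
            case INPUT_FORBIDDEN:
                // Invalid if anything feeds this processor.
                return !node.hasIncomingConnection();
            case INPUT_ALLOWED:
            default:
                // Incoming connections are optional.
                return true;
        }
    }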


[14/19] nifi git commit: NIFI-810: rebased from master

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/b974445d/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --cc nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index 3c816d0,0c39eda..f69c510
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@@ -74,1241 -74,1242 +74,1241 @@@ import org.slf4j.LoggerFactory
   */
  public class StandardProcessorNode extends ProcessorNode implements Connectable {
  
 -	public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
 -
 -	public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
 -	public static final String DEFAULT_YIELD_PERIOD = "1 sec";
 -	public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
 -	private final AtomicReference<ProcessGroup> processGroup;
 -	private final Processor processor;
 -	private final AtomicReference<String> identifier;
 -	private final Map<Connection, Connectable> destinations;
 -	private final Map<Relationship, Set<Connection>> connections;
 -	private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
 -	private final AtomicReference<List<Connection>> incomingConnectionsRef;
 -	private final ReentrantReadWriteLock rwLock;
 -	private final Lock readLock;
 -	private final Lock writeLock;
 -	private final AtomicBoolean isolated;
 -	private final AtomicBoolean lossTolerant;
 -	private final AtomicReference<ScheduledState> scheduledState;
 -	private final AtomicReference<String> comments;
 -	private final AtomicReference<String> name;
 -	private final AtomicReference<Position> position;
 -	private final AtomicReference<String> annotationData;
 -	private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
 -	private final AtomicReference<String> yieldPeriod;
 -	private final AtomicReference<String> penalizationPeriod;
 -	private final AtomicReference<Map<String, String>> style;
 -	private final AtomicInteger concurrentTaskCount;
 -	private final AtomicLong yieldExpiration;
 -	private final AtomicLong schedulingNanos;
 -	private final boolean triggerWhenEmpty;
 -	private final boolean sideEffectFree;
 -	private final boolean triggeredSerially;
 -	private final boolean triggerWhenAnyDestinationAvailable;
 -	private final boolean eventDrivenSupported;
 -	private final boolean batchSupported;
 -	private final Requirement inputRequirement;
 -	private final ValidationContextFactory validationContextFactory;
 -	private final ProcessScheduler processScheduler;
 -	private long runNanos = 0L;
 -
 -	private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
 -
 -	@SuppressWarnings("deprecation")
 -	public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
 -		final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
 -		super(processor, uuid, validationContextFactory, controllerServiceProvider);
 -
 -		this.processor = processor;
 -		identifier = new AtomicReference<>(uuid);
 -		destinations = new HashMap<>();
 -		connections = new HashMap<>();
 -		incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
 -		scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
 -		rwLock = new ReentrantReadWriteLock(false);
 -		readLock = rwLock.readLock();
 -		writeLock = rwLock.writeLock();
 -		lossTolerant = new AtomicBoolean(false);
 -		final Set<Relationship> emptySetOfRelationships = new HashSet<>();
 -		undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
 -		comments = new AtomicReference<>("");
 -		name = new AtomicReference<>(processor.getClass().getSimpleName());
 -		schedulingPeriod = new AtomicReference<>("0 sec");
 -		schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
 -		yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
 -		yieldExpiration = new AtomicLong(0L);
 -		concurrentTaskCount = new AtomicInteger(1);
 -		position = new AtomicReference<>(new Position(0D, 0D));
 -		style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
 -		this.processGroup = new AtomicReference<>();
 -		processScheduler = scheduler;
 -		annotationData = new AtomicReference<>();
 -		isolated = new AtomicBoolean(false);
 -		penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
 -
 -		final Class<?> procClass = processor.getClass();
 -		triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
 -		sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
 -		batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
 -		triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
 -		triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
 -			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
 -		this.validationContextFactory = validationContextFactory;
 -		eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
 -			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
 -
 -		final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
 -		if (inputRequirementPresent) {
 -			inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
 -		} else {
 -			inputRequirement = Requirement.INPUT_ALLOWED;
 -		}
 -
 -		schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
 -	}
 -
 -	/**
 -	 * @return comments about this specific processor instance
 -	 */
 -	@Override
 -	public String getComments() {
 -		return comments.get();
 -	}
 -
 -	/**
 -	 * Provides an opportunity to retain information about this particular processor instance
 -	 *
 -	 * @param comments new comments
 -	 */
 -	@Override
 -	public void setComments(final String comments) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			this.comments.set(comments);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public ScheduledState getScheduledState() {
 -		return scheduledState.get();
 -	}
 -
 -	@Override
 -	public Position getPosition() {
 -		return position.get();
 -	}
 -
 -	@Override
 -	public void setPosition(Position position) {
 -		this.position.set(position);
 -	}
 -
 -	@Override
 -	public Map<String, String> getStyle() {
 -		return style.get();
 -	}
 -
 -	@Override
 -	public void setStyle(final Map<String, String> style) {
 -		if (style != null) {
 -			this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
 -		}
 -	}
 -
 -	@Override
 -	public String getIdentifier() {
 -		return identifier.get();
 -	}
 -
 -	/**
 -	 * @return true if FlowFile content generated by this processor is considered loss tolerant
 -	 */
 -	@Override
 -	public boolean isLossTolerant() {
 -		return lossTolerant.get();
 -	}
 -
 -	@Override
 -	public boolean isIsolated() {
 -		return isolated.get();
 -	}
 -
 -	/**
 -	 * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
 -	 */
 -	@Override
 -	public boolean isTriggerWhenEmpty() {
 -		return triggerWhenEmpty;
 -	}
 -
 -	/**
 -	 * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
 -	 */
 -	@Override
 -	public boolean isSideEffectFree() {
 -		return sideEffectFree;
 -	}
 -
 -	@Override
 -	public boolean isHighThroughputSupported() {
 -		return batchSupported;
 -	}
 -
 -	/**
 -	 * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
 -	 */
 -	@Override
 -	public boolean isTriggerWhenAnyDestinationAvailable() {
 -		return triggerWhenAnyDestinationAvailable;
 -	}
 -
 -	/**
 -	 * Indicates whether flow file content made by this processor must be persisted
 -	 *
 -	 * @param lossTolerant tolerant
 -	 */
 -	@Override
 -	public void setLossTolerant(final boolean lossTolerant) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			this.lossTolerant.set(lossTolerant);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * Indicates whether the processor runs on only the primary node.
 -	 *
 -	 * @param isolated isolated
 -	 */
 -	public void setIsolated(final boolean isolated) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			this.isolated.set(isolated);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean isAutoTerminated(final Relationship relationship) {
 -		final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
 -		if (terminatable == null) {
 -			return false;
 -		}
 -		return terminatable.contains(relationship);
 -	}
 -
 -	@Override
 -	public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -
 -			for (final Relationship rel : terminate) {
 -				if (!getConnections(rel).isEmpty()) {
 -					throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
 -				}
 -			}
 -			undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
 -	 */
 -	@Override
 -	public Set<Relationship> getAutoTerminatedRelationships() {
 -		Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
 -		if (relationships == null) {
 -			relationships = new HashSet<>();
 -		}
 -		return Collections.unmodifiableSet(relationships);
 -	}
 -
 -	@Override
 -	public String getName() {
 -		return name.get();
 -	}
 -
 -	/**
 -	 * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
 -	 */
 -	@SuppressWarnings("deprecation")
 -	public String getProcessorDescription() {
 -		CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
 -		String description = null;
 -		if (capDesc != null) {
 -			description = capDesc.value();
 -		} else {
 -			final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
 -			= processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
 -			if (deprecatedCapDesc != null) {
 -				description = deprecatedCapDesc.value();
 -			}
 -		}
 -
 -		return description;
 -	}
 -
 -	@Override
 -	public void setName(final String name) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			this.name.set(name);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * @param timeUnit determines the unit of time to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
 -	 * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
 -	 */
 -	@Override
 -	public long getSchedulingPeriod(final TimeUnit timeUnit) {
 -		return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
 -	}
 -
 -	@Override
 -	public boolean isEventDrivenSupported() {
 -		readLock.lock();
 -		try {
 -			return this.eventDrivenSupported;
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * Updates the Scheduling Strategy used for this Processor
 -	 *
 -	 * @param schedulingStrategy strategy
 -	 *
 -	 * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
 -	 */
 -	@Override
 -	public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
 -		writeLock.lock();
 -		try {
 -			if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
 -				// not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
 -				// it no longer supports EventDriven mode, we don't want the app to fail to start up if it was already in Event-Driven
 -				// Mode. Instead, we will simply leave it in Timer-Driven mode
 -				return;
 -			}
 -
 -			this.schedulingStrategy = schedulingStrategy;
 -			setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * @return the currently configured scheduling strategy
 -	 */
 -	@Override
 -	public SchedulingStrategy getSchedulingStrategy() {
 -		readLock.lock();
 -		try {
 -			return this.schedulingStrategy;
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public String getSchedulingPeriod() {
 -		return schedulingPeriod.get();
 -	}
 -
 -	@Override
 -	public void setScheduldingPeriod(final String schedulingPeriod) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -
 -			switch (schedulingStrategy) {
 -				case CRON_DRIVEN: {
 -					try {
 -						new CronExpression(schedulingPeriod);
 -					} catch (final Exception e) {
 -						throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
 -					}
 -				}
 -				break;
 -				case PRIMARY_NODE_ONLY:
 -				case TIMER_DRIVEN: {
 -					final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
 -					if (schedulingNanos < 0) {
 -						throw new IllegalArgumentException("Scheduling Period must be positive");
 -					}
 -					this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
 -				}
 -				break;
 -				case EVENT_DRIVEN:
 -				default:
 -					return;
 -			}
 -
 -			this.schedulingPeriod.set(schedulingPeriod);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public long getRunDuration(final TimeUnit timeUnit) {
 -		readLock.lock();
 -		try {
 -			return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void setRunDuration(final long duration, final TimeUnit timeUnit) {
 -		writeLock.lock();
 -		try {
 -			if (duration < 0) {
 -				throw new IllegalArgumentException("Run Duration must be a non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
 -			}
 -
 -			this.runNanos = timeUnit.toNanos(duration);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public long getYieldPeriod(final TimeUnit timeUnit) {
 -		return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
 -	}
 -
 -	@Override
 -	public String getYieldPeriod() {
 -		return yieldPeriod.get();
 -	}
 -
 -	@Override
 -	public void setYieldPeriod(final String yieldPeriod) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
 -			if (yieldMillis < 0) {
 -				throw new IllegalArgumentException("Yield duration must be positive");
 -			}
 -			this.yieldPeriod.set(yieldPeriod);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
 -	 * methods.
 -	 */
 -	@Override
 -	public void yield() {
 -		final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
 -		yield(yieldMillis, TimeUnit.MILLISECONDS);
 -
 -		final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
 -		LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
 -	}
 -
 -	@Override
 -	public void yield(final long period, final TimeUnit timeUnit) {
 -		final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
 -		yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
 -
 -		processScheduler.yield(this);
 -	}
 -
 -	/**
 -	 * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
 -	 */
 -	@Override
 -	public long getYieldExpiration() {
 -		return yieldExpiration.get();
 -	}
 -
 -	@Override
 -	public long getPenalizationPeriod(final TimeUnit timeUnit) {
 -		return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
 -	}
 -
 -	@Override
 -	public String getPenalizationPeriod() {
 -		return penalizationPeriod.get();
 -	}
 -
 -	@Override
 -	public void setPenalizationPeriod(final String penalizationPeriod) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
 -			if (penalizationMillis < 0) {
 -				throw new IllegalArgumentException("Penalization duration must be positive");
 -			}
 -			this.penalizationPeriod.set(penalizationPeriod);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * Determines the number of concurrent tasks that may be running for this processor.
 -	 *
 -	 * @param taskCount a number of concurrent tasks this processor may have running
 -	 * @throws IllegalArgumentException if the given value is less than 1
 -	 */
 -	@Override
 -	public void setMaxConcurrentTasks(final int taskCount) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
 -				throw new IllegalArgumentException();
 -			}
 -			if (!triggeredSerially) {
 -				concurrentTaskCount.set(taskCount);
 -			}
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean isTriggeredSerially() {
 -		return triggeredSerially;
 -	}
 -
 -	/**
 -	 * @return the number of tasks that may execute concurrently for this processor
 -	 */
 -	@Override
 -	public int getMaxConcurrentTasks() {
 -		return concurrentTaskCount.get();
 -	}
 -
 -	@Override
 -	public LogLevel getBulletinLevel() {
 -		return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
 -	}
 -
 -	@Override
 -	public void setBulletinLevel(final LogLevel level) {
 -		LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
 -	}
 -
 -	@Override
 -	public Set<Connection> getConnections() {
 -		final Set<Connection> allConnections = new HashSet<>();
 -		readLock.lock();
 -		try {
 -			for (final Set<Connection> connectionSet : connections.values()) {
 -				allConnections.addAll(connectionSet);
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -
 -		return allConnections;
 -	}
 -
 -	@Override
 -	public List<Connection> getIncomingConnections() {
 -		return incomingConnectionsRef.get();
 -	}
 -
 -	@Override
 -	public Set<Connection> getConnections(final Relationship relationship) {
 -		final Set<Connection> applicableConnections;
 -		readLock.lock();
 -		try {
 -			applicableConnections = connections.get(relationship);
 -		} finally {
 -			readLock.unlock();
 -		}
 -		return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
 -	}
 -
 -	@Override
 -	public void addConnection(final Connection connection) {
 -		Objects.requireNonNull(connection, "connection cannot be null");
 -
 -		if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
 -			throw new IllegalStateException("Cannot add a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
 -		}
 -
 -		writeLock.lock();
 -		try {
 -			List<Connection> updatedIncoming = null;
 -			if (connection.getDestination().equals(this)) {
 -				// don't add the connection twice. This may occur if we have a self-loop because we will be told
 -				// to add the connection once because we are the source and again because we are the destination.
 -				final List<Connection> incomingConnections = incomingConnectionsRef.get();
 -				updatedIncoming = new ArrayList<>(incomingConnections);
 -				if (!updatedIncoming.contains(connection)) {
 -					updatedIncoming.add(connection);
 -				}
 -			}
 -
 -			if (connection.getSource().equals(this)) {
 -				// don't add the connection twice. This may occur if we have a self-loop because we will be told
 -				// to add the connection once because we are the source and again because we are the destination.
 -				if (!destinations.containsKey(connection)) {
 -					for (final Relationship relationship : connection.getRelationships()) {
 -						final Relationship rel = getRelationship(relationship.getName());
 -						Set<Connection> set = connections.get(rel);
 -						if (set == null) {
 -							set = new HashSet<>();
 -							connections.put(rel, set);
 -						}
 -
 -						set.add(connection);
 -
 -						destinations.put(connection, connection.getDestination());
 -					}
 -
 -					final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
 -					if (autoTerminated != null) {
 -						autoTerminated.removeAll(connection.getRelationships());
 -						this.undefinedRelationshipsToTerminate.set(autoTerminated);
 -					}
 -				}
 -			}
 -
 -			if (updatedIncoming != null) {
 -				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 -			}
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean hasIncomingConnection() {
 -		return !incomingConnectionsRef.get().isEmpty();
 -	}
 -
 -	@Override
 -	public void updateConnection(final Connection connection) throws IllegalStateException {
 -		if (requireNonNull(connection).getSource().equals(this)) {
 -			writeLock.lock();
 -			try {
 -				//
 -				// update any relationships
 -				//
 -				// first check if any relations were removed.
 -				final List<Relationship> existingRelationships = new ArrayList<>();
 -				for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
 -					if (entry.getValue().contains(connection)) {
 -						existingRelationships.add(entry.getKey());
 -					}
 -				}
 -
 -				for (final Relationship rel : connection.getRelationships()) {
 -					if (!existingRelationships.contains(rel)) {
 -						// relationship was removed. Check if this is legal.
 -						final Set<Connection> connectionsForRelationship = getConnections(rel);
 -						if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
 -							// if we are running and we do not terminate undefined relationships and this is the only
 -							// connection that defines the given relationship, and that relationship is required,
 -							// then it is not legal to remove this relationship from this connection.
 -							throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
 -								+ this + ", which is currently running");
 -						}
 -					}
 -				}
 -
 -				// remove the connection from any list that currently contains it
 -				for (final Set<Connection> list : connections.values()) {
 -					list.remove(connection);
 -				}
 -
 -				// add the connection in for all relationships listed.
 -				for (final Relationship rel : connection.getRelationships()) {
 -					Set<Connection> set = connections.get(rel);
 -					if (set == null) {
 -						set = new HashSet<>();
 -						connections.put(rel, set);
 -					}
 -					set.add(connection);
 -				}
 -
 -				// update to the new destination
 -				destinations.put(connection, connection.getDestination());
 -
 -				final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
 -				if (autoTerminated != null) {
 -					autoTerminated.removeAll(connection.getRelationships());
 -					this.undefinedRelationshipsToTerminate.set(autoTerminated);
 -				}
 -			} finally {
 -				writeLock.unlock();
 -			}
 -		}
 -
 -		if (connection.getDestination().equals(this)) {
 -			writeLock.lock();
 -			try {
 -				// update our incoming connections -- we can just remove & re-add the connection to
 -				// update the list.
 -				final List<Connection> incomingConnections = incomingConnectionsRef.get();
 -				final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
 -				updatedIncoming.remove(connection);
 -				updatedIncoming.add(connection);
 -				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 -			} finally {
 -				writeLock.unlock();
 -			}
 -		}
 -	}
 -
 -	@Override
 -	public void removeConnection(final Connection connection) {
 -		boolean connectionRemoved = false;
 -
 -		if (requireNonNull(connection).getSource().equals(this)) {
 -			for (final Relationship relationship : connection.getRelationships()) {
 -				final Set<Connection> connectionsForRelationship = getConnections(relationship);
 -				if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
 -					throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
 -				}
 -			}
 -
 -			writeLock.lock();
 -			try {
 -				for (final Set<Connection> connectionList : this.connections.values()) {
 -					connectionList.remove(connection);
 -				}
 -
 -				connectionRemoved = (destinations.remove(connection) != null);
 -			} finally {
 -				writeLock.unlock();
 -			}
 -		}
 -
 -		if (connection.getDestination().equals(this)) {
 -			writeLock.lock();
 -			try {
 -				final List<Connection> incomingConnections = incomingConnectionsRef.get();
 -				if (incomingConnections.contains(connection)) {
 -					final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
 -					updatedIncoming.remove(connection);
 -					incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 -					return;
 -				}
 -			} finally {
 -				writeLock.unlock();
 -			}
 -		}
 -
 -		if (!connectionRemoved) {
 -			throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
 -		}
 -	}
 -
 -	/**
 -	 * @param relationshipName name
 -	 * @return the relationship for this nodes processor for the given name or creates a new relationship for the given name
 -	 */
 -	@Override
 -	public Relationship getRelationship(final String relationshipName) {
 -		final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
 -		Relationship returnRel = specRel;
 -
 -		final Set<Relationship> relationships;
 -		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -			relationships = processor.getRelationships();
 -		}
 -
 -		for (final Relationship rel : relationships) {
 -			if (rel.equals(specRel)) {
 -				returnRel = rel;
 -				break;
 -			}
 -		}
 -		return returnRel;
 -	}
 -
 -	@Override
 -	public Processor getProcessor() {
 -		return this.processor;
 -	}
 -
 -	/**
 -	 * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
 -	 */
 -	public Set<Connectable> getDestinations() {
 -		final Set<Connectable> nonSelfDestinations = new HashSet<>();
 -		readLock.lock();
 -		try {
 -			for (final Connectable connectable : destinations.values()) {
 -				if (connectable != this) {
 -					nonSelfDestinations.add(connectable);
 -				}
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -		return nonSelfDestinations;
 -	}
 -
 -	public Set<Connectable> getDestinations(final Relationship relationship) {
 -		readLock.lock();
 -		try {
 -			final Set<Connectable> destinationSet = new HashSet<>();
 -			final Set<Connection> relationshipConnections = connections.get(relationship);
 -			if (relationshipConnections != null) {
 -				for (final Connection connection : relationshipConnections) {
 -					destinationSet.add(destinations.get(connection));
 -				}
 -			}
 -			return destinationSet;
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	public Set<Relationship> getUndefinedRelationships() {
 -		final Set<Relationship> undefined = new HashSet<>();
 -		readLock.lock();
 -		try {
 -			final Set<Relationship> relationships;
 -			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -				relationships = processor.getRelationships();
 -			}
 -
 -			if (relationships == null) {
 -				return undefined;
 -			}
 -			for (final Relationship relation : relationships) {
 -				final Set<Connection> connectionSet = this.connections.get(relation);
 -				if (connectionSet == null || connectionSet.isEmpty()) {
 -					undefined.add(relation);
 -				}
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -		return undefined;
 -	}
 -
 -	/**
 -	 * Determines if the given node is a destination for this node
 -	 *
 -	 * @param node node
 -	 * @return true if is a direct destination node; false otherwise
 -	 */
 -	boolean isRelated(final ProcessorNode node) {
 -		readLock.lock();
 -		try {
 -			return this.destinations.containsValue(node);
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean isRunning() {
 -		readLock.lock();
 -		try {
 -			return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public int getActiveThreadCount() {
 -		readLock.lock();
 -		try {
 -			return processScheduler.getActiveThreadCount(this);
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean isValid() {
 -		readLock.lock();
 -		try {
 -			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
 -
 -			final Collection<ValidationResult> validationResults;
 -			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -				validationResults = getProcessor().validate(validationContext);
 -			}
 -
 -			for (final ValidationResult result : validationResults) {
 -				if (!result.isValid()) {
 -					return false;
 -				}
 -			}
 -
 -			for (final Relationship undef : getUndefinedRelationships()) {
 -				if (!isAutoTerminated(undef)) {
 -					return false;
 -				}
 -			}
 -
 -			switch (getInputRequirement()) {
 -				case INPUT_ALLOWED:
 -					break;
 -				case INPUT_FORBIDDEN: {
 -					if (!getIncomingConnections().isEmpty()) {
 -						return false;
 -					}
 -					break;
 -				}
 -				case INPUT_REQUIRED: {
 -					if (getIncomingConnections().isEmpty()) {
 -						return false;
 -					}
 -					break;
 -				}
 -			}
 -		} catch (final Throwable t) {
 -			return false;
 -		} finally {
 -			readLock.unlock();
 -		}
 -
 -		return true;
 -	}
 -
 -	@Override
 -	public Collection<ValidationResult> getValidationErrors() {
 -		final List<ValidationResult> results = new ArrayList<>();
 -		readLock.lock();
 -		try {
 -			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
 -
 -			final Collection<ValidationResult> validationResults;
 -			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -				validationResults = getProcessor().validate(validationContext);
 -			}
 -
 -			for (final ValidationResult result : validationResults) {
 -				if (!result.isValid()) {
 -					results.add(result);
 -				}
 -			}
 -
 -			for (final Relationship relationship : getUndefinedRelationships()) {
 -				if (!isAutoTerminated(relationship)) {
 -					final ValidationResult error = new ValidationResult.Builder()
 -						.explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
 -						.subject("Relationship " + relationship.getName())
 -						.valid(false)
 -						.build();
 -					results.add(error);
 -				}
 -			}
 -
 -			switch (getInputRequirement()) {
 -				case INPUT_ALLOWED:
 -					break;
 -				case INPUT_FORBIDDEN: {
 -					final int incomingConnCount = getIncomingConnections().size();
 -					if (incomingConnCount != 0) {
 -						results.add(new ValidationResult.Builder()
 -							.explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
 -							.subject("Incoming Connections")
 -							.valid(false)
 -							.build());
 -					}
 -					break;
 -				}
 -				case INPUT_REQUIRED: {
 -					if (getIncomingConnections().isEmpty()) {
 -						results.add(new ValidationResult.Builder()
 -							.explanation("Processor requires at least one Incoming Connection in order to perform its function but currently has none")
 -							.subject("Incoming Connections")
 -							.valid(false)
 -							.build());
 -					}
 -					break;
 -				}
 -			}
 -		} catch (final Throwable t) {
 -			results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
 -		} finally {
 -			readLock.unlock();
 -		}
 -		return results;
 -	}
 -
 -	@Override
 -	public Requirement getInputRequirement() {
 -		return inputRequirement;
 -	}
 -
 -	/**
 -	 * Establishes node equality (based on the processor's identifier)
 -	 *
 -	 * @param other node
 -	 * @return true if equal
 -	 */
 -	@Override
 -	public boolean equals(final Object other) {
 -		if (!(other instanceof ProcessorNode)) {
 -			return false;
 -		}
 -		final ProcessorNode on = (ProcessorNode) other;
 -		return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
 -	}
 -
 -	@Override
 -	public int hashCode() {
 -		return new HashCodeBuilder(7, 67).append(identifier).toHashCode();
 -	}
 -
 -	@Override
 -	public Collection<Relationship> getRelationships() {
 -		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -			return getProcessor().getRelationships();
 -		}
 -	}
 -
 -	@Override
 -	public String toString() {
 -		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -			return getProcessor().toString();
 -		}
 -	}
 -
 -	@Override
 -	public ProcessGroup getProcessGroup() {
 -		return processGroup.get();
 -	}
 -
 -	@Override
 -	public void setProcessGroup(final ProcessGroup group) {
 -		writeLock.lock();
 -		try {
 -			this.processGroup.set(group);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
 -		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -			processor.onTrigger(context, sessionFactory);
 -		}
 -	}
 -
 -	@Override
 -	public ConnectableType getConnectableType() {
 -		return ConnectableType.PROCESSOR;
 -	}
 -
 -	@Override
 -	public void setScheduledState(final ScheduledState scheduledState) {
 -		this.scheduledState.set(scheduledState);
 -		if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
 -			yieldExpiration.set(0L);
 -		}
 -	}
 -
 -	@Override
 -	public void setAnnotationData(final String data) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot set AnnotationData while processor is running");
 -			}
 -
 -			this.annotationData.set(data);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public String getAnnotationData() {
 -		return annotationData.get();
 -	}
 -
 -	@Override
 -	public Collection<ValidationResult> validate(final ValidationContext validationContext) {
 -		return getValidationErrors();
 -	}
 -
 -	@Override
 -	public void verifyCanDelete() throws IllegalStateException {
 -		verifyCanDelete(false);
 -	}
 -
 -	@Override
 -	public void verifyCanDelete(final boolean ignoreConnections) {
 -		readLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException(this + " is running");
 -			}
 -
 -			if (!ignoreConnections) {
 -				for (final Set<Connection> connectionSet : connections.values()) {
 -					for (final Connection connection : connectionSet) {
 -						connection.verifyCanDelete();
 -					}
 -				}
 -
 -				for (final Connection connection : incomingConnectionsRef.get()) {
 -					if (connection.getSource().equals(this)) {
 -						connection.verifyCanDelete();
 -					} else {
 -						throw new IllegalStateException(this + " is the destination of another component");
 -					}
 -				}
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanStart() {
 -		readLock.lock();
 -		try {
 -			switch (getScheduledState()) {
 -				case DISABLED:
 -					throw new IllegalStateException(this + " cannot be started because it is disabled");
 -				case RUNNING:
 -					throw new IllegalStateException(this + " cannot be started because it is already running");
 -				case STOPPED:
 -					break;
 -			}
 -			verifyNoActiveThreads();
 -
 -			if (!isValid()) {
 -				throw new IllegalStateException(this + " is not in a valid state");
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
 -		switch (getScheduledState()) {
 -			case DISABLED:
 -				throw new IllegalStateException(this + " cannot be started because it is disabled");
 -			case RUNNING:
 -				throw new IllegalStateException(this + " cannot be started because it is already running");
 -			case STOPPED:
 -				break;
 -		}
 -		verifyNoActiveThreads();
 -
 -		final Set<String> ids = new HashSet<>();
 -		for (final ControllerServiceNode node : ignoredReferences) {
 -			ids.add(node.getIdentifier());
 -		}
 -
 -		final Collection<ValidationResult> validationResults = getValidationErrors(ids);
 -		for (final ValidationResult result : validationResults) {
 -			if (!result.isValid()) {
 -				throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
 -			}
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanStop() {
 -		if (getScheduledState() != ScheduledState.RUNNING) {
 -			throw new IllegalStateException(this + " is not scheduled to run");
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanUpdate() {
 -		readLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException(this + " is not stopped");
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanEnable() {
 -		readLock.lock();
 -		try {
 -			if (getScheduledState() != ScheduledState.DISABLED) {
 -				throw new IllegalStateException(this + " is not disabled");
 -			}
 -
 -			verifyNoActiveThreads();
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanDisable() {
 -		readLock.lock();
 -		try {
 -			if (getScheduledState() != ScheduledState.STOPPED) {
 -				throw new IllegalStateException(this + " is not stopped");
 -			}
 -			verifyNoActiveThreads();
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	private void verifyNoActiveThreads() throws IllegalStateException {
 -		final int threadCount = processScheduler.getActiveThreadCount(this);
 -		if (threadCount > 0) {
 -			throw new IllegalStateException(this + " has " + threadCount + " threads still active");
 -		}
 -	}
 -
 -	@Override
 +    public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
 +
 +    public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
 +    public static final String DEFAULT_YIELD_PERIOD = "1 sec";
 +    public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
 +    private final AtomicReference<ProcessGroup> processGroup;
 +    private final Processor processor;
 +    private final AtomicReference<String> identifier;
 +    private final Map<Connection, Connectable> destinations;
 +    private final Map<Relationship, Set<Connection>> connections;
 +    private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
 +    private final AtomicReference<List<Connection>> incomingConnectionsRef;
 +    private final ReentrantReadWriteLock rwLock;
 +    private final Lock readLock;
 +    private final Lock writeLock;
 +    private final AtomicBoolean isolated;
 +    private final AtomicBoolean lossTolerant;
 +    private final AtomicReference<ScheduledState> scheduledState;
 +    private final AtomicReference<String> comments;
 +    private final AtomicReference<String> name;
 +    private final AtomicReference<Position> position;
 +    private final AtomicReference<String> annotationData;
 +    private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
 +    private final AtomicReference<String> yieldPeriod;
 +    private final AtomicReference<String> penalizationPeriod;
 +    private final AtomicReference<Map<String, String>> style;
 +    private final AtomicInteger concurrentTaskCount;
 +    private final AtomicLong yieldExpiration;
 +    private final AtomicLong schedulingNanos;
 +    private final boolean triggerWhenEmpty;
 +    private final boolean sideEffectFree;
 +    private final boolean triggeredSerially;
 +    private final boolean triggerWhenAnyDestinationAvailable;
 +    private final boolean eventDrivenSupported;
 +    private final boolean batchSupported;
 +    private final Requirement inputRequirement;
 +    private final ValidationContextFactory validationContextFactory;
 +    private final ProcessScheduler processScheduler;
 +    private long runNanos = 0L;
 +
 +    private SchedulingStrategy schedulingStrategy; // guarded by read/write lock
 +
 +    @SuppressWarnings("deprecation")
 +    public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
 +        final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
 +        super(processor, uuid, validationContextFactory, controllerServiceProvider);
 +
 +        this.processor = processor;
 +        identifier = new AtomicReference<>(uuid);
 +        destinations = new HashMap<>();
 +        connections = new HashMap<>();
 +        incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
 +        scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
 +        rwLock = new ReentrantReadWriteLock(false);
 +        readLock = rwLock.readLock();
 +        writeLock = rwLock.writeLock();
 +        lossTolerant = new AtomicBoolean(false);
 +        final Set<Relationship> emptySetOfRelationships = new HashSet<>();
 +        undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
 +        comments = new AtomicReference<>("");
 +        name = new AtomicReference<>(processor.getClass().getSimpleName());
 +        schedulingPeriod = new AtomicReference<>("0 sec");
 +        schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
 +        yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
 +        yieldExpiration = new AtomicLong(0L);
 +        concurrentTaskCount = new AtomicInteger(1);
 +        position = new AtomicReference<>(new Position(0D, 0D));
 +        style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
 +        this.processGroup = new AtomicReference<>();
 +        processScheduler = scheduler;
 +        annotationData = new AtomicReference<>();
 +        isolated = new AtomicBoolean(false);
 +        penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
 +
 +        final Class<?> procClass = processor.getClass();
 +        triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
 +        sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
 +        batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
 +        triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
 +        triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
 +            || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
 +        this.validationContextFactory = validationContextFactory;
 +        eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
 +            || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
 +
 +        final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
 +        if (inputRequirementPresent) {
 +            inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
 +        } else {
 +            inputRequirement = Requirement.INPUT_ALLOWED;
 +        }
 +
 +        schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
 +    }
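 +
 +    // NIFI-810: illustrative sketch (not part of this commit's API surface) of how a
 +    // processor would opt in to the input-requirement check resolved above. Names such
 +    // as "MyFilterProcessor" are hypothetical:
 +    //
 +    //   @InputRequirement(Requirement.INPUT_REQUIRED)
 +    //   public class MyFilterProcessor extends AbstractProcessor { ... }
 +    //
 +    // A processor class without the annotation falls back to Requirement.INPUT_ALLOWED,
 +    // so existing processors keep their current validation behavior.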
 +
 +    /**
 +     * @return comments about this specific processor instance
 +     */
 +    @Override
 +    public String getComments() {
 +        return comments.get();
 +    }
 +
 +    /**
 +     * Provides an opportunity to retain information about this particular processor instance
 +     *
 +     * @param comments new comments
 +     */
 +    @Override
 +    public void setComments(final String comments) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            this.comments.set(comments);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public ScheduledState getScheduledState() {
 +        return scheduledState.get();
 +    }
 +
 +    @Override
 +    public Position getPosition() {
 +        return position.get();
 +    }
 +
 +    @Override
 +    public void setPosition(Position position) {
 +        this.position.set(position);
 +    }
 +
 +    @Override
 +    public Map<String, String> getStyle() {
 +        return style.get();
 +    }
 +
 +    @Override
 +    public void setStyle(final Map<String, String> style) {
 +        if (style != null) {
 +            this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
 +        }
 +    }
 +
 +    @Override
 +    public String getIdentifier() {
 +        return identifier.get();
 +    }
 +
 +    /**
 +     * @return true if FlowFile content generated by this processor is considered loss tolerant; false otherwise
 +     */
 +    @Override
 +    public boolean isLossTolerant() {
 +        return lossTolerant.get();
 +    }
 +
 +    @Override
 +    public boolean isIsolated() {
 +        return isolated.get();
 +    }
 +
 +    /**
 +     * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
 +     */
 +    @Override
 +    public boolean isTriggerWhenEmpty() {
 +        return triggerWhenEmpty;
 +    }
 +
 +    /**
 +     * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
 +     */
 +    @Override
 +    public boolean isSideEffectFree() {
 +        return sideEffectFree;
 +    }
 +
 +    @Override
 +    public boolean isHighThroughputSupported() {
 +        return batchSupported;
 +    }
 +
 +    /**
 +     * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
 +     */
 +    @Override
 +    public boolean isTriggerWhenAnyDestinationAvailable() {
 +        return triggerWhenAnyDestinationAvailable;
 +    }
 +
 +    /**
 +     * Indicates whether FlowFile content generated by this processor must be persisted
 +     *
 +     * @param lossTolerant whether the generated content is considered loss tolerant
 +     */
 +    @Override
 +    public void setLossTolerant(final boolean lossTolerant) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            this.lossTolerant.set(lossTolerant);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * Indicates whether the processor runs on only the primary node.
 +     *
 +     * @param isolated isolated
 +     */
 +    public void setIsolated(final boolean isolated) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            this.isolated.set(isolated);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean isAutoTerminated(final Relationship relationship) {
 +        final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
 +        if (terminatable == null) {
 +            return false;
 +        }
 +        return terminatable.contains(relationship);
 +    }
 +
 +    @Override
 +    public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +
 +            for (final Relationship rel : terminate) {
 +                if (!getConnections(rel).isEmpty()) {
 +                    throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
 +                }
 +            }
 +            undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
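 +
 +    // Usage sketch (hypothetical names): auto-terminating a relationship that has no
 +    // Connection, e.g. before starting a node whose "failure" relationship is unused:
 +    //
 +    //   final Relationship failure = node.getRelationship("failure");
 +    //   node.setAutoTerminatedRelationships(Collections.singleton(failure));
 +    //
 +    // The guard above rejects relationships that already have a Connection, since a
 +    // relationship cannot be both routed and auto-terminated.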
 +
 +    /**
 +     * @return an unmodifiable Set containing all of the Relationship objects that are configured to be auto-terminated
 +     */
 +    @Override
 +    public Set<Relationship> getAutoTerminatedRelationships() {
 +        Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
 +        if (relationships == null) {
 +            relationships = new HashSet<>();
 +        }
 +        return Collections.unmodifiableSet(relationships);
 +    }
 +
 +    @Override
 +    public String getName() {
 +        return name.get();
 +    }
 +
 +    /**
 +     * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
 +     */
 +    @SuppressWarnings("deprecation")
 +    public String getProcessorDescription() {
 +        CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
 +        String description = null;
 +        if (capDesc != null) {
 +            description = capDesc.value();
 +        } else {
 +            final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc = processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
 +            if (deprecatedCapDesc != null) {
 +                description = deprecatedCapDesc.value();
 +            }
 +        }
 +
 +        return description;
 +    }
 +
 +    @Override
 +    public void setName(final String name) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            this.name.set(name);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * @param timeUnit determines the unit of time in which to report the scheduling period; if null, the period is reported in units of {@link #DEFAULT_TIME_UNIT}
 +     * @return the scheduling period that should elapse before subsequent cycles of this processor's tasks
 +     */
 +    @Override
 +    public long getSchedulingPeriod(final TimeUnit timeUnit) {
 +        return (timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit).convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
 +    }
 +
 +    @Override
 +    public boolean isEventDrivenSupported() {
 +        readLock.lock();
 +        try {
 +            return this.eventDrivenSupported;
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * Updates the Scheduling Strategy used for this Processor
 +     *
 +     * @param schedulingStrategy strategy
 +     *
 +     * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
 +     */
 +    @Override
 +    public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
 +        writeLock.lock();
 +        try {
 +            if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
 +                // not valid, so just ignore it. We don't throw an Exception because, if a developer changes a Processor so that
 +                // it no longer supports Event-Driven mode, we don't want the app to fail to start up when the Processor was already
 +                // in Event-Driven mode. Instead, we simply leave it in Timer-Driven mode.
 +                return;
 +            }
 +
 +            this.schedulingStrategy = schedulingStrategy;
 +            setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * @return the currently configured scheduling strategy
 +     */
 +    @Override
 +    public SchedulingStrategy getSchedulingStrategy() {
 +        readLock.lock();
 +        try {
 +            return this.schedulingStrategy;
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public String getSchedulingPeriod() {
 +        return schedulingPeriod.get();
 +    }
 +
 +    @Override
 +    public void setScheduldingPeriod(final String schedulingPeriod) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +
 +            switch (schedulingStrategy) {
 +                case CRON_DRIVEN: {
 +                    try {
 +                        new CronExpression(schedulingPeriod);
 +                    } catch (final Exception e) {
 +                        throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
 +                    }
 +                }
 +                    break;
 +                case PRIMARY_NODE_ONLY:
 +                case TIMER_DRIVEN: {
 +                    final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
 +                    if (schedulingNanos < 0) {
 +                        throw new IllegalArgumentException("Scheduling Period must be positive");
 +                    }
 +                    this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
 +                }
 +                    break;
 +                case EVENT_DRIVEN:
 +                default:
 +                    return;
 +            }
 +
 +            this.schedulingPeriod.set(schedulingPeriod);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
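 +
 +    // Sketch of accepted Scheduling Period values, per the switch above (illustrative
 +    // examples, not an exhaustive list). For TIMER_DRIVEN / PRIMARY_NODE_ONLY, a time
 +    // duration string:
 +    //
 +    //   node.setScheduldingPeriod("5 sec");          // parsed by FormatUtils.getTimeDuration
 +    //
 +    // and for CRON_DRIVEN, a Quartz cron expression:
 +    //
 +    //   node.setScheduldingPeriod("0 0/5 * * * ?");  // validated via new CronExpression(...)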
 +
 +    @Override
 +    public long getRunDuration(final TimeUnit timeUnit) {
 +        readLock.lock();
 +        try {
 +            return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void setRunDuration(final long duration, final TimeUnit timeUnit) {
 +        writeLock.lock();
 +        try {
 +            if (duration < 0) {
 +                throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
 +            }
 +
 +            this.runNanos = timeUnit.toNanos(duration);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public long getYieldPeriod(final TimeUnit timeUnit) {
 +        return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
 +    }
 +
 +    @Override
 +    public String getYieldPeriod() {
 +        return yieldPeriod.get();
 +    }
 +
 +    @Override
 +    public void setYieldPeriod(final String yieldPeriod) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
 +            if (yieldMillis < 0) {
 +                throw new IllegalArgumentException("Yield duration must be positive");
 +            }
 +            this.yieldPeriod.set(yieldPeriod);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
 +     * methods.
 +     */
 +    @Override
 +    public void yield() {
 +        final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
 +        yield(yieldMillis, TimeUnit.MILLISECONDS);
 +
 +        final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
 +        LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
 +    }
 +
 +    @Override
 +    public void yield(final long period, final TimeUnit timeUnit) {
 +        final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
 +        yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
 +
 +        processScheduler.yield(this);
 +    }
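 +
 +    // Sketch: a yield only ever pushes the expiration further out, never earlier,
 +    // because of the Math.max above. E.g. (hypothetical timeline):
 +    //
 +    //   node.yield(10, TimeUnit.SECONDS); // expiration ~ now + 10s
 +    //   node.yield(2, TimeUnit.SECONDS);  // expiration unchanged; now + 2s is earlier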
 +
 +    /**
 +     * @return the time, in milliseconds since the Epoch, at which this processor may once again be scheduled to run.
 +     */
 +    @Override
 +    public long getYieldExpiration() {
 +        return yieldExpiration.get();
 +    }
 +
 +    @Override
 +    public long getPenalizationPeriod(final TimeUnit timeUnit) {
 +        return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
 +    }
 +
 +    @Override
 +    public String getPenalizationPeriod() {
 +        return penalizationPeriod.get();
 +    }
 +
 +    @Override
 +    public void setPenalizationPeriod(final String penalizationPeriod) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
 +            if (penalizationMillis < 0) {
 +                throw new IllegalArgumentException("Penalization duration must be positive");
 +            }
 +            this.penalizationPeriod.set(penalizationPeriod);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * Determines the number of concurrent tasks that may be running for this processor.
 +     *
 +     * @param taskCount a number of concurrent tasks this processor may have running
 +     * @throws IllegalArgumentException if the given value is less than 1
 +     */
 +    @Override
 +    public void setMaxConcurrentTasks(final int taskCount) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
 +                throw new IllegalArgumentException("Cannot set Max Concurrent Tasks to " + taskCount + "; value must be at least 1");
 +            }
 +            if (!triggeredSerially) {
 +                concurrentTaskCount.set(taskCount);
 +            }
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean isTriggeredSerially() {
 +        return triggeredSerially;
 +    }
 +
 +    /**
 +     * @return the number of tasks that may execute concurrently for this processor
 +     */
 +    @Override
 +    public int getMaxConcurrentTasks() {
 +        return concurrentTaskCount.get();
 +    }
 +
 +    @Override
 +    public LogLevel getBulletinLevel() {
 +        return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
 +    }
 +
 +    @Override
 +    public void setBulletinLevel(final LogLevel level) {
 +        LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
 +    }
 +
 +    @Override
 +    public Set<Connection> getConnections() {
 +        final Set<Connection> allConnections = new HashSet<>();
 +        readLock.lock();
 +        try {
 +            for (final Set<Connection> connectionSet : connections.values()) {
 +                allConnections.addAll(connectionSet);
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +
 +        return allConnections;
 +    }
 +
 +    @Override
 +    public List<Connection> getIncomingConnections() {
 +        return incomingConnectionsRef.get();
 +    }
 +
 +    @Override
 +    public Set<Connection> getConnections(final Relationship relationship) {
 +        final Set<Connection> applicableConnections;
 +        readLock.lock();
 +        try {
 +            applicableConnections = connections.get(relationship);
 +        } finally {
 +            readLock.unlock();
 +        }
 +        return (applicableConnections == null) ? Collections.<Connection> emptySet() : Collections.unmodifiableSet(applicableConnections);
 +    }
 +
 +    @Override
 +    public void addConnection(final Connection connection) {
 +        Objects.requireNonNull(connection, "connection cannot be null");
 +
 +        if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
 +            throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
 +        }
 +
 +        writeLock.lock();
 +        try {
 +            List<Connection> updatedIncoming = null;
 +            if (connection.getDestination().equals(this)) {
 +                // don't add the connection twice. This may occur if we have a self-loop because we will be told
 +                // to add the connection once because we are the source and again because we are the destination.
 +                final List<Connection> incomingConnections = incomingConnectionsRef.get();
 +                updatedIncoming = new ArrayList<>(incomingConnections);
 +                if (!updatedIncoming.contains(connection)) {
 +                    updatedIncoming.add(connection);
 +                }
 +            }
 +
 +            if (connection.getSource().equals(this)) {
 +                // don't add the connection twice. This may occur if we have a self-loop because we will be told
 +                // to add the connection once because we are the source and again because we are the destination.
 +                if (!destinations.containsKey(connection)) {
 +                    for (final Relationship relationship : connection.getRelationships()) {
 +                        final Relationship rel = getRelationship(relationship.getName());
 +                        Set<Connection> set = connections.get(rel);
 +                        if (set == null) {
 +                            set = new HashSet<>();
 +                            connections.put(rel, set);
 +                        }
 +
 +                        set.add(connection);
 +
 +                        destinations.put(connection, connection.getDestination());
 +                    }
 +
 +                    final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
 +                    if (autoTerminated != null) {
 +                        autoTerminated.removeAll(connection.getRelationships());
 +                        this.undefinedRelationshipsToTerminate.set(autoTerminated);
 +                    }
 +                }
 +            }
 +
 +            if (updatedIncoming != null) {
 +                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 +            }
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean hasIncomingConnection() {
 +        return !incomingConnectionsRef.get().isEmpty();
 +    }
 +
 +    @Override
 +    public void updateConnection(final Connection connection) throws IllegalStateException {
 +        if (requireNonNull(connection).getSource().equals(this)) {
 +            writeLock.lock();
 +            try {
 +                //
 +                // update any relationships
 +                //
 +                // first check if any relations were removed.
 +                final List<Relationship> existingRelationships = new ArrayList<>();
 +                for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
 +                    if (entry.getValue().contains(connection)) {
 +                        existingRelationships.add(entry.getKey());
 +                    }
 +                }
 +
 +                for (final Relationship rel : connection.getRelationships()) {
 +                    if (!existingRelationships.contains(rel)) {
 +                        // relationship was removed. Check if this is legal.
 +                        final Set<Connection> connectionsForRelationship = getConnections(rel);
 +                        if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
 +                            // if we are running and we do not terminate undefined relationships and this is the only
 +                            // connection that defines the given relationship, and that relationship is required,
 +                            // then it is not legal to remove this relationship from this connection.
 +                            throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
 +                                + this + ", which is currently running");
 +                        }
 +                    }
 +                }
 +
 +                // remove the connection from any list that currently contains it
 +                for (final Set<Connection> list : connections.values()) {
 +                    list.remove(connection);
 +                }
 +
 +                // add the connection in for all relationships listed.
 +                for (final Relationship rel : connection.getRelationships()) {
 +                    Set<Connection> set = connections.get(rel);
 +                    if (set == null) {
 +                        set = new HashSet<>();
 +                        connections.put(rel, set);
 +                    }
 +                    set.add(connection);
 +                }
 +
 +                // update to the new destination
 +                destinations.put(connection, connection.getDestination());
 +
 +                final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
 +                if (autoTerminated != null) {
 +                    autoTerminated.removeAll(connection.getRelationships());
 +                    this.undefinedRelationshipsToTerminate.set(autoTerminated);
 +                }
 +            } finally {
 +                writeLock.unlock();
 +            }
 +        }
 +
 +        if (connection.getDestination().equals(this)) {
 +            writeLock.lock();
 +            try {
 +                // update our incoming connections -- we can just remove & re-add the connection to
 +                // update the list.
 +                final List<Connection> incomingConnections = incomingConnectionsRef.get();
 +                final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
 +                updatedIncoming.remove(connection);
 +                updatedIncoming.add(connection);
 +                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 +            } finally {
 +                writeLock.unlock();
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public void removeConnection(final Connection connection) {
 +        boolean connectionRemoved = false;
 +
 +        if (requireNonNull(connection).getSource().equals(this)) {
 +            for (final Relationship relationship : connection.getRelationships()) {
 +                final Set<Connection> connectionsForRelationship = getConnections(relationship);
 +                if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
 +                    throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
 +                }
 +            }
 +
 +            writeLock.lock();
 +            try {
 +                for (final Set<Connection> connectionList : this.connections.values()) {
 +                    connectionList.remove(connection);
 +                }
 +
 +                connectionRemoved = (destinations.remove(connection) != null);
 +            } finally {
 +                writeLock.unlock();
 +            }
 +        }
 +
 +        if (connection.getDestination().equals(this)) {
 +            writeLock.lock();
 +            try {
 +                final List<Connection> incomingConnections = incomingConnectionsRef.get();
 +                if (incomingConnections.contains(connection)) {
 +                    final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
 +                    updatedIncoming.remove(connection);
 +                    incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 +                    return;
 +                }
 +            } finally {
 +                writeLock.unlock();
 +            }
 +        }
 +
 +        if (!connectionRemoved) {
 +            throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
 +        }
 +    }
 +
 +    /**
 +     * @param relationshipName name
 +     * @return the relationship of this node's processor with the given name, or a new Relationship with that name if the processor does not define one
 +     */
 +    @Override
 +    public Relationship getRelationship(final String relationshipName) {
 +        final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
 +        Relationship returnRel = specRel;
 +
 +        final Set<Relationship> relationships;
 +        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +            relationships = processor.getRelationships();
 +        }
 +
 +        for (final Relationship rel : relationships) {
 +            if (rel.equals(specRel)) {
 +                returnRel = rel;
 +                break;
 +            }
 +        }
 +        return returnRel;
 +    }
 +
 +    @Override
 +    public Processor getProcessor() {
 +        return this.processor;
 +    }
 +
 +    /**
 +     * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
 +     */
 +    public Set<Connectable> getDestinations() {
 +        final Set<Connectable> nonSelfDestinations = new HashSet<>();
 +        readLock.lock();
 +        try {
 +            for (final Connectable connectable : destinations.values()) {
 +                if (connectable != this) {
 +                    nonSelfDestinations.add(connectable);
 +                }
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +        return nonSelfDestinations;
 +    }
 +
 +    public Set<Connectable> getDestinations(final Relationship relationship) {
 +        readLock.lock();
 +        try {
 +            final Set<Connectable> destinationSet = new HashSet<>();
 +            final Set<Connection> relationshipConnections = connections.get(relationship);
 +            if (relationshipConnections != null) {
 +                for (final Connection connection : relationshipConnections) {
 +                    destinationSet.add(destinations.get(connection));
 +                }
 +            }
 +            return destinationSet;
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    public Set<Relationship> getUndefinedRelationships() {
 +        final Set<Relationship> undefined = new HashSet<>();
 +        readLock.lock();
 +        try {
 +            final Set<Relationship> relationships;
 +            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +                relationships = processor.getRelationships();
 +            }
 +
 +            if (relationships == null) {
 +                return undefined;
 +            }
 +            for (final Relationship relation : relationships) {
 +                final Set<Connection> connectionSet = this.connections.get(relation);
 +                if (connectionSet == null || connectionSet.isEmpty()) {
 +                    undefined.add(relation);
 +                }
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +        return undefined;
 +    }
 +
 +    /**
 +     * Determines if the given node is a destination for this node
 +     *
 +     * @param node node
 +     * @return true if the given node is a direct destination of this node; false otherwise
 +     */
 +    boolean isRelated(final ProcessorNode node) {
 +        readLock.lock();
 +        try {
 +            return this.destinations.containsValue(node);
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean isRunning() {
 +        readLock.lock();
 +        try {
 +            return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public int getActiveThreadCount() {
 +        readLock.lock();
 +        try {
 +            return processScheduler.getActiveThreadCount(this);
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean isValid() {
 +        readLock.lock();
 +        try {
 +            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
 +
 +            final Collection<ValidationResult> validationResults;
 +            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +                validationResults = getProcessor().validate(validationContext);
 +            }
 +
 +            for (final ValidationResult result : validationResults) {
 +                if (!result.isValid()) {
 +                    return false;
 +                }
 +            }
 +
 +            for (final Relationship undef : getUndefinedRelationships()) {
 +                if (!isAutoTerminated(undef)) {
 +                    return false;
 +                }
 +            }
 +
 +            switch (getInputRequirement()) {
 +                case INPUT_ALLOWED:
 +                    break;
 +                case INPUT_FORBIDDEN: {
 +                    if (!getIncomingConnections().isEmpty()) {
 +                        return false;
 +                    }
 +                    break;
 +                }
 +                case INPUT_REQUIRED: {
 +                    if (getIncomingConnections().isEmpty()) {
 +                        return false;
 +                    }
 +                    break;
 +                }
 +            }
 +        } catch (final Throwable t) {
 +            return false;
 +        } finally {
 +            readLock.unlock();
 +        }
 +
 +        return true;
 +    }
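 +
 +    // Sketch of the NIFI-810 rule enforced above (hypothetical node/connection names):
 +    // a node whose processor declares Requirement.INPUT_REQUIRED stays invalid until an
 +    // incoming Connection exists, and one declaring INPUT_FORBIDDEN becomes invalid as
 +    // soon as one is added:
 +    //
 +    //   assert !requiresInputNode.isValid();        // no incoming Connection yet
 +    //   requiresInputNode.addConnection(incoming);  // incoming.getDestination() == node
 +    //   assert requiresInputNode.isValid();         // assuming no other validation errors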
 +
 +    @Override
 +    public Collection<ValidationResult> getValidationErrors() {
 +        final List<ValidationResult> results = new ArrayList<>();
 +        readLock.lock();
 +        try {
 +            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
 +
 +            final Collection<ValidationResult> validationResults;
 +            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +                validationResults = getProcessor().validate(validationContext);
 +            }
 +
 +            for (final ValidationResult result : validationResults) {
 +                if (!result.isValid()) {
 +                    results.add(result);
 +                }
 +            }
 +
 +            for (final Relationship relationship : getUndefinedRelationships()) {
 +                if (!isAutoTerminated(relationship)) {
 +                    final ValidationResult error = new ValidationResult.Builder()
 +                        .explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
 +                        .subject("Relationship " + relationship.getName())
 +                        .valid(false)
 +                        .build();
 +                    results.add(error);
 +                }
 +            }
 +
 +            switch (getInputRequirement()) {
 +                case INPUT_ALLOWED:
 +                    break;
 +                case INPUT_FORBIDDEN: {
 +                    final int incomingConnCount = getIncomingConnections().size();
 +                    if (incomingConnCount != 0) {
 +                        results.add(new ValidationResult.Builder()
-                             .explanation("Processor is currently configured with " + incomingConnCount + " upstream connections but does not accept any upstream connections")
-                             .subject("Upstream Connections")
++                            .explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
++                            .subject("Incoming Connections")
 +                            .valid(false)
 +                            .build());
 +                    }
 +                    break;
 +                }
 +                case INPUT_REQUIRED: {
 +                    if (getIncomingConnections().isEmpty()) {
 +                        results.add(new ValidationResult.Builder()
-                             .explanation("Processor requires an upstream connection but currently has none")
-                             .subject("Upstream Connections")
++                            .explanation("Processor required at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
++                            .subject("Incoming Connections")
 +                            .valid(false)
 +                            .build());
 +                    }
 +                    break;
 +                }
 +            }
 +        } catch (final Throwable t) {
 +            results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
 +        } finally {
 +            readLock.unlock();
 +        }
 +        return results;
 +    }
 +
 +    @Override
 +    public Requirement getInputRequirement() {
 +        return inputRequirement;
 +    }
 +
 +    /**
 +     * Establishes node equality (based on the processor's identifier)
 +     *
 +     * @param other node
 +     * @return true if equal
 +     */
 +    @Override
 +    public boolean equals(final Object other) {
 +        if (!(other instanceof ProcessorNode)) {
 +            return false;
 +        }
 +        final ProcessorNode on = (ProcessorNode) other;
 +        return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
 +    }
 +
 +    @Override
 +    public int hashCode() {
 +        return new HashCodeBuilder(7, 67).append(identifier.get()).toHashCode();
 +    }
 +
 +    @Override
 +    public Collection<Relationship> getRelationships() {
 +        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +            return getProcessor().getRelationships();
 +        }
 +    }
 +
 +    @Override
 +    public String toString() {
 +        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +            return getProcessor().toString();
 +        }
 +    }
 +
 +    @Override
 +    public ProcessGroup getProcessGroup() {
 +        return processGroup.get();
 +    }
 +
 +    @Override
 +    public void setProcessGroup(final ProcessGroup group) {
 +        writeLock.lock();
 +        try {
 +            this.processGroup.set(group);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
 +        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +            processor.onTrigger(context, sessionFactory);
 +        }
 +    }
 +
 +    @Override
 +    public ConnectableType getConnectableType() {
 +        return ConnectableType.PROCESSOR;
 +    }
 +
 +    @Override
 +    public void setScheduledState(final ScheduledState scheduledState) {
 +        this.scheduledState.set(scheduledState);
 +        if (!scheduledState.equals(ScheduledState.RUNNING)) { // if user stops processor, clear yield expiration
 +            yieldExpiration.set(0L);
 +        }
 +    }
 +
 +    @Override
 +    public void setAnnotationData(final String data) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot set AnnotationData while processor is running");
 +            }
 +
 +            this.annotationData.set(data);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public String getAnnotationData() {
 +        return annotationData.get();
 +    }
 +
 +    @Override
 +    public Collection<ValidationResult> validate(final ValidationContext validationContext) {
 +        return getValidationErrors();
 +    }
 +
 +    @Override
 +    public void verifyCanDelete() throws IllegalStateException {
 +        verifyCanDelete(false);
 +    }
 +
 +    @Override
 +    public void verifyCanDelete(final boolean ignoreConnections) {
 +        readLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException(this + " is running");
 +            }
 +
 +            if (!ignoreConnections) {
 +                for (final Set<Connection> connectionSet : connections.values()) {
 +                    for (final Connection connection : connectionSet) {
 +                        connection.verifyCanDelete();
 +                    }
 +                }
 +
 +                for (final Connection connection : incomingConnectionsRef.get()) {
 +                    if (connection.getSource().equals(this)) {
 +                        connection.verifyCanDelete();
 +                    } else {
 +                        throw new IllegalStateException(this + " is the destination of another component");
 +                    }
 +                }
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanStart() {
 +        readLock.lock();
 +        try {
 +            switch (getScheduledState()) {
 +                case DISABLED:
 +                    throw new IllegalStateException(this + " cannot be started because it is disabled");
 +                case RUNNING:
 +                    throw new IllegalStateException(this + " cannot be started because it is already running");
 +                case STOPPED:
 +                    break;
 +            }
 +            verifyNoActiveThreads();
 +
 +            if (!isValid()) {
 +                throw new IllegalStateException(this + " is not in a valid state");
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
 +        switch (getScheduledState()) {
 +            case DISABLED:
 +                throw new IllegalStateException(this + " cannot be started because it is disabled");
 +            case RUNNING:
 +                throw new IllegalStateException(this + " cannot be started because it is already running");
 +            case STOPPED:
 +                break;
 +        }
 +        verifyNoActiveThreads();
 +
 +        final Set<String> ids = new HashSet<>();
 +        for (final ControllerServiceNode node : ignoredReferences) {
 +            ids.add(node.getIdentifier());
 +        }
 +
 +        final Collection<ValidationResult> validationResults = getValidationErrors(ids);
 +        for (final ValidationResult result : validationResults) {
 +            if (!result.isValid()) {
 +                throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanStop() {
 +        if (getScheduledState() != ScheduledState.RUNNING) {
 +            throw new IllegalStateException(this + " is not scheduled to run");
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanUpdate() {
 +        readLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException(this + " is not stopped");
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanEnable() {
 +        readLock.lock();
 +        try {
 +            if (getScheduledState() != ScheduledState.DISABLED) {
 +                throw new IllegalStateException(this + " is not disabled");
 +            }
 +
 +            verifyNoActiveThreads();
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanDisable() {
 +        readLock.lock();
 +        try {
 +            if (getScheduledState() != ScheduledState.STOPPED) {
 +               

<TRUNCATED>

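For readers tracing the validation logic in the hunk above: the switch on getInputRequirement() is driven by an annotation on the processor class. A minimal sketch of how a processor might declare that requirement follows; the InputRequirement annotation and its Requirement enum are the ones imported in the diffs later in this thread, while the processor class itself is hypothetical.

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;

// Hypothetical processor, shown only to illustrate the annotation.
// With INPUT_REQUIRED, the framework reports the processor as invalid
// until at least one incoming connection has been created.
@InputRequirement(Requirement.INPUT_REQUIRED)
public class ExampleTransformProcessor extends AbstractProcessor {

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // FlowFile-handling logic would go here; omitted for brevity.
    }
}

A processor that generates its own FlowFiles would instead declare Requirement.INPUT_FORBIDDEN, which makes any incoming connection a validation error, as shown in the getValidationErrors() hunk above.
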
[17/19] nifi git commit: NIFI-810: Reworded validation errors pertaining to upstream connections

Posted by ma...@apache.org.
NIFI-810: Reworded validation errors pertaining to upstream connections


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/8e2308b7
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/8e2308b7
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/8e2308b7

Branch: refs/heads/master
Commit: 8e2308b78de480dd7848ffe8efb485a5ee61c42a
Parents: ccfb57f
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Oct 7 17:53:04 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:53:04 2015 -0400

----------------------------------------------------------------------
 .../org/apache/nifi/controller/StandardProcessorNode.java    | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/8e2308b7/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index ad22c6d..2b0d413 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -1048,8 +1048,8 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
                     final int incomingConnCount = getIncomingConnections().size();
                     if (incomingConnCount != 0) {
                         results.add(new ValidationResult.Builder()
-                            .explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
-                            .subject("Incoming Connections")
+                            .explanation("Processor does not allow upstream connections but currently has " + incomingConnCount)
+                            .subject("Upstream Connections")
                             .valid(false)
                             .build());
                     }
@@ -1058,8 +1058,8 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
                 case INPUT_REQUIRED: {
                     if (getIncomingConnections().isEmpty()) {
                         results.add(new ValidationResult.Builder()
-                            .explanation("Processor required at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
-                            .subject("Incoming Connections")
+                            .explanation("Processor requires an upstream connection but currently has none")
+                            .subject("Upstream Connections")
                             .valid(false)
                             .build());
                     }


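The rewording above leaves the construction pattern itself unchanged: a ValidationResult.Builder populated with a subject, an explanation, and valid(false). A minimal sketch of that pattern, using only the builder methods that appear in the hunks; the helper class, method, and their names are hypothetical.

import org.apache.nifi.components.ValidationResult;

public final class ValidationExamples {

    // Hypothetical helper mirroring the pattern in the diff above: builds the
    // non-valid result reported when a processor that forbids input has
    // upstream connections.
    static ValidationResult upstreamConnectionError(final int incomingConnCount) {
        return new ValidationResult.Builder()
            .subject("Upstream Connections")
            .explanation("Processor does not allow upstream connections but currently has " + incomingConnCount)
            .valid(false)
            .build();
    }

    private ValidationExamples() {
    }
}

Callers such as getValidationErrors() simply add the built result to the returned collection; nothing is thrown, so all validation problems can be reported at once.
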
[04/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index cbd0f88..0c39eda 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -16,14 +16,6 @@
  */
 package org.apache.nifi.controller;
 
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.behavior.TriggerSerially;
-import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
-import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-
 import static java.util.Objects.requireNonNull;
 
 import java.util.ArrayList;
@@ -43,6 +35,17 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.TriggerSerially;
+import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
+import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.connectable.Connectable;
@@ -61,8 +64,6 @@ import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.scheduling.SchedulingStrategy;
 import org.apache.nifi.util.FormatUtils;
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.quartz.CronExpression;
 import org.slf4j.LoggerFactory;
 
@@ -73,1185 +74,1242 @@ import org.slf4j.LoggerFactory;
  */
 public class StandardProcessorNode extends ProcessorNode implements Connectable {
 
-    public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
-
-    public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
-    public static final String DEFAULT_YIELD_PERIOD = "1 sec";
-    public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
-    private final AtomicReference<ProcessGroup> processGroup;
-    private final Processor processor;
-    private final AtomicReference<String> identifier;
-    private final Map<Connection, Connectable> destinations;
-    private final Map<Relationship, Set<Connection>> connections;
-    private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
-    private final AtomicReference<List<Connection>> incomingConnectionsRef;
-    private final ReentrantReadWriteLock rwLock;
-    private final Lock readLock;
-    private final Lock writeLock;
-    private final AtomicBoolean isolated;
-    private final AtomicBoolean lossTolerant;
-    private final AtomicReference<ScheduledState> scheduledState;
-    private final AtomicReference<String> comments;
-    private final AtomicReference<String> name;
-    private final AtomicReference<Position> position;
-    private final AtomicReference<String> annotationData;
-    private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
-    private final AtomicReference<String> yieldPeriod;
-    private final AtomicReference<String> penalizationPeriod;
-    private final AtomicReference<Map<String, String>> style;
-    private final AtomicInteger concurrentTaskCount;
-    private final AtomicLong yieldExpiration;
-    private final AtomicLong schedulingNanos;
-    private final boolean triggerWhenEmpty;
-    private final boolean sideEffectFree;
-    private final boolean triggeredSerially;
-    private final boolean triggerWhenAnyDestinationAvailable;
-    private final boolean eventDrivenSupported;
-    private final boolean batchSupported;
-    private final ValidationContextFactory validationContextFactory;
-    private final ProcessScheduler processScheduler;
-    private long runNanos = 0L;
-
-    private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
-
-    @SuppressWarnings("deprecation")
-    public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
-            final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
-        super(processor, uuid, validationContextFactory, controllerServiceProvider);
-
-        this.processor = processor;
-        identifier = new AtomicReference<>(uuid);
-        destinations = new HashMap<>();
-        connections = new HashMap<>();
-        incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
-        scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
-        rwLock = new ReentrantReadWriteLock(false);
-        readLock = rwLock.readLock();
-        writeLock = rwLock.writeLock();
-        lossTolerant = new AtomicBoolean(false);
-        final Set<Relationship> emptySetOfRelationships = new HashSet<>();
-        undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
-        comments = new AtomicReference<>("");
-        name = new AtomicReference<>(processor.getClass().getSimpleName());
-        schedulingPeriod = new AtomicReference<>("0 sec");
-        schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
-        yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
-        yieldExpiration = new AtomicLong(0L);
-        concurrentTaskCount = new AtomicInteger(1);
-        position = new AtomicReference<>(new Position(0D, 0D));
-        style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
-        this.processGroup = new AtomicReference<>();
-        processScheduler = scheduler;
-        annotationData = new AtomicReference<>();
-        isolated = new AtomicBoolean(false);
-        penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
-
-        final Class<?> procClass = processor.getClass();
-        triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
-        sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
-        batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
-        triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
-        triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
-                || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
-        this.validationContextFactory = validationContextFactory;
-        eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
-                || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
-        schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
-    }
-
-    /**
-     * @return comments about this specific processor instance
-     */
-    @Override
-    public String getComments() {
-        return comments.get();
-    }
-
-    /**
-     * Provides an opportunity to retain information about this particular processor instance
-     *
-     * @param comments new comments
-     */
-    @Override
-    public void setComments(final String comments) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.comments.set(comments);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public ScheduledState getScheduledState() {
-        return scheduledState.get();
-    }
-
-    @Override
-    public Position getPosition() {
-        return position.get();
-    }
-
-    @Override
-    public void setPosition(Position position) {
-        this.position.set(position);
-    }
-
-    @Override
-    public Map<String, String> getStyle() {
-        return style.get();
-    }
-
-    @Override
-    public void setStyle(final Map<String, String> style) {
-        if (style != null) {
-            this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
-        }
-    }
-
-    @Override
-    public String getIdentifier() {
-        return identifier.get();
-    }
-
-    /**
-     * @return whether flow file content generated by this processor is considered loss tolerant
-     */
-    @Override
-    public boolean isLossTolerant() {
-        return lossTolerant.get();
-    }
-
-    @Override
-    public boolean isIsolated() {
-        return isolated.get();
-    }
-
-    /**
-     * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
-     */
-    @Override
-    public boolean isTriggerWhenEmpty() {
-        return triggerWhenEmpty;
-    }
-
-    /**
-     * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
-     */
-    @Override
-    public boolean isSideEffectFree() {
-        return sideEffectFree;
-    }
-
-    @Override
-    public boolean isHighThroughputSupported() {
-        return batchSupported;
-    }
-
-    /**
-     * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
-     */
-    @Override
-    public boolean isTriggerWhenAnyDestinationAvailable() {
-        return triggerWhenAnyDestinationAvailable;
-    }
-
-    /**
-     * Indicates whether flow file content made by this processor must be persisted
-     *
-     * @param lossTolerant tolerant
-     */
-    @Override
-    public void setLossTolerant(final boolean lossTolerant) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.lossTolerant.set(lossTolerant);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Indicates whether the processor runs on only the primary node.
-     *
-     * @param isolated isolated
-     */
-    public void setIsolated(final boolean isolated) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.isolated.set(isolated);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isAutoTerminated(final Relationship relationship) {
-        final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
-        if (terminatable == null) {
-            return false;
-        }
-        return terminatable.contains(relationship);
-    }
-
-    @Override
-    public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-
-            for (final Relationship rel : terminate) {
-                if (!getConnections(rel).isEmpty()) {
-                    throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
-                }
-            }
-            undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @return an unmodifiable Set that contains all of the Relationship objects that are configured to be auto-terminated
-     */
-    @Override
-    public Set<Relationship> getAutoTerminatedRelationships() {
-        Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
-        if (relationships == null) {
-            relationships = new HashSet<>();
-        }
-        return Collections.unmodifiableSet(relationships);
-    }
-
-    @Override
-    public String getName() {
-        return name.get();
-    }
-
-    /**
-     * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
-     */
-    @SuppressWarnings("deprecation")
-    public String getProcessorDescription() {
-        CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
-        String description = null;
-        if (capDesc != null) {
-            description = capDesc.value();
-        } else {
-            final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
-                    = processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
-            if (deprecatedCapDesc != null) {
-                description = deprecatedCapDesc.value();
-            }
-        }
-
-        return description;
-    }
-
-    @Override
-    public void setName(final String name) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.name.set(name);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @param timeUnit determines the unit of time to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
-     * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
-     */
-    @Override
-    public long getSchedulingPeriod(final TimeUnit timeUnit) {
-        return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
-    }
-
-    @Override
-    public boolean isEventDrivenSupported() {
-        readLock.lock();
-        try {
-            return this.eventDrivenSupported;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    /**
-     * Updates the Scheduling Strategy used for this Processor
-     *
-     * @param schedulingStrategy strategy
-     *
-     * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
-     */
-    @Override
-    public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
-        writeLock.lock();
-        try {
-            if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
-                // not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
-                // it no longer supports EventDriven mode, we don't want the app to fail to startup if it was already in Event-Driven
-                // Mode. Instead, we will simply leave it in Timer-Driven mode
-                return;
-            }
-
-            this.schedulingStrategy = schedulingStrategy;
-            setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @return the currently configured scheduling strategy
-     */
-    @Override
-    public SchedulingStrategy getSchedulingStrategy() {
-        readLock.lock();
-        try {
-            return this.schedulingStrategy;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public String getSchedulingPeriod() {
-        return schedulingPeriod.get();
-    }
-
-    @Override
-    public void setScheduldingPeriod(final String schedulingPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-
-            switch (schedulingStrategy) {
-                case CRON_DRIVEN: {
-                    try {
-                        new CronExpression(schedulingPeriod);
-                    } catch (final Exception e) {
-                        throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
-                    }
-                }
-                break;
-                case PRIMARY_NODE_ONLY:
-                case TIMER_DRIVEN: {
-                    final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
-                    if (schedulingNanos < 0) {
-                        throw new IllegalArgumentException("Scheduling Period must be positive");
-                    }
-                    this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
-                }
-                break;
-                case EVENT_DRIVEN:
-                default:
-                    return;
-            }
-
-            this.schedulingPeriod.set(schedulingPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public long getRunDuration(final TimeUnit timeUnit) {
-        readLock.lock();
-        try {
-            return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void setRunDuration(final long duration, final TimeUnit timeUnit) {
-        writeLock.lock();
-        try {
-            if (duration < 0) {
-                throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
-            }
-
-            this.runNanos = timeUnit.toNanos(duration);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public long getYieldPeriod(final TimeUnit timeUnit) {
-        return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-    }
-
-    @Override
-    public String getYieldPeriod() {
-        return yieldPeriod.get();
-    }
-
-    @Override
-    public void setYieldPeriod(final String yieldPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
-            if (yieldMillis < 0) {
-                throw new IllegalArgumentException("Yield duration must be positive");
-            }
-            this.yieldPeriod.set(yieldPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
-     * methods.
-     */
-    @Override
-    public void yield() {
-        final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
-        yield(yieldMillis, TimeUnit.MILLISECONDS);
-
-        final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
-        LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
-    }
-
-    @Override
-    public void yield(final long period, final TimeUnit timeUnit) {
-        final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
-        yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
-
-        processScheduler.yield(this);
-    }
-
-    /**
-     * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
-     */
-    @Override
-    public long getYieldExpiration() {
-        return yieldExpiration.get();
-    }
-
-    @Override
-    public long getPenalizationPeriod(final TimeUnit timeUnit) {
-        return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-    }
-
-    @Override
-    public String getPenalizationPeriod() {
-        return penalizationPeriod.get();
-    }
-
-    @Override
-    public void setPenalizationPeriod(final String penalizationPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
-            if (penalizationMillis < 0) {
-                throw new IllegalArgumentException("Penalization duration must be positive");
-            }
-            this.penalizationPeriod.set(penalizationPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Determines the number of concurrent tasks that may be running for this processor.
-     *
-     * @param taskCount a number of concurrent tasks this processor may have running
-     * @throws IllegalArgumentException if the given value is less than 1
-     */
-    @Override
-    public void setMaxConcurrentTasks(final int taskCount) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
-                throw new IllegalArgumentException();
-            }
-            if (!triggeredSerially) {
-                concurrentTaskCount.set(taskCount);
-            }
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isTriggeredSerially() {
-        return triggeredSerially;
-    }
-
-    /**
-     * @return the number of tasks that may execute concurrently for this processor
-     */
-    @Override
-    public int getMaxConcurrentTasks() {
-        return concurrentTaskCount.get();
-    }
-
-    @Override
-    public LogLevel getBulletinLevel() {
-        return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
-    }
-
-    @Override
-    public void setBulletinLevel(final LogLevel level) {
-        LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
-    }
-
-    @Override
-    public Set<Connection> getConnections() {
-        final Set<Connection> allConnections = new HashSet<>();
-        readLock.lock();
-        try {
-            for (final Set<Connection> connectionSet : connections.values()) {
-                allConnections.addAll(connectionSet);
-            }
-        } finally {
-            readLock.unlock();
-        }
-
-        return allConnections;
-    }
-
-    @Override
-    public List<Connection> getIncomingConnections() {
-        return incomingConnectionsRef.get();
-    }
-
-    @Override
-    public Set<Connection> getConnections(final Relationship relationship) {
-        final Set<Connection> applicableConnections;
-        readLock.lock();
-        try {
-            applicableConnections = connections.get(relationship);
-        } finally {
-            readLock.unlock();
-        }
-        return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
-    }
-
-    @Override
-    public void addConnection(final Connection connection) {
-        Objects.requireNonNull(connection, "connection cannot be null");
-
-        if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
-            throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
-        }
-
-        writeLock.lock();
-        try {
-            List<Connection> updatedIncoming = null;
-            if (connection.getDestination().equals(this)) {
-                // don't add the connection twice. This may occur if we have a self-loop because we will be told
-                // to add the connection once because we are the source and again because we are the destination.
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                updatedIncoming = new ArrayList<>(incomingConnections);
-                if (!updatedIncoming.contains(connection)) {
-                    updatedIncoming.add(connection);
-                }
-            }
-
-            if (connection.getSource().equals(this)) {
-                // don't add the connection twice. This may occur if we have a self-loop because we will be told
-                // to add the connection once because we are the source and again because we are the destination.
-                if (!destinations.containsKey(connection)) {
-                    for (final Relationship relationship : connection.getRelationships()) {
-                        final Relationship rel = getRelationship(relationship.getName());
-                        Set<Connection> set = connections.get(rel);
-                        if (set == null) {
-                            set = new HashSet<>();
-                            connections.put(rel, set);
-                        }
-
-                        set.add(connection);
-
-                        destinations.put(connection, connection.getDestination());
-                    }
-
-                    final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-                    if (autoTerminated != null) {
-                        autoTerminated.removeAll(connection.getRelationships());
-                        this.undefinedRelationshipsToTerminate.set(autoTerminated);
-                    }
-                }
-            }
-
-            if (updatedIncoming != null) {
-                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-            }
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean hasIncomingConnection() {
-        return !incomingConnectionsRef.get().isEmpty();
-    }
-
-    @Override
-    public void updateConnection(final Connection connection) throws IllegalStateException {
-        if (requireNonNull(connection).getSource().equals(this)) {
-            writeLock.lock();
-            try {
-                //
-                // update any relationships
-                //
-                // first check if any relations were removed.
-                final List<Relationship> existingRelationships = new ArrayList<>();
-                for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
-                    if (entry.getValue().contains(connection)) {
-                        existingRelationships.add(entry.getKey());
-                    }
-                }
-
-                for (final Relationship rel : connection.getRelationships()) {
-                    if (!existingRelationships.contains(rel)) {
-                        // relationship was removed. Check if this is legal.
-                        final Set<Connection> connectionsForRelationship = getConnections(rel);
-                        if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
-                            // if we are running and we do not terminate undefined relationships and this is the only
-                            // connection that defines the given relationship, and that relationship is required,
-                            // then it is not legal to remove this relationship from this connection.
-                            throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
-                                    + this + ", which is currently running");
-                        }
-                    }
-                }
-
-                // remove the connection from any list that currently contains
-                for (final Set<Connection> list : connections.values()) {
-                    list.remove(connection);
-                }
-
-                // add the connection in for all relationships listed.
-                for (final Relationship rel : connection.getRelationships()) {
-                    Set<Connection> set = connections.get(rel);
-                    if (set == null) {
-                        set = new HashSet<>();
-                        connections.put(rel, set);
-                    }
-                    set.add(connection);
-                }
-
-                // update to the new destination
-                destinations.put(connection, connection.getDestination());
-
-                final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-                if (autoTerminated != null) {
-                    autoTerminated.removeAll(connection.getRelationships());
-                    this.undefinedRelationshipsToTerminate.set(autoTerminated);
-                }
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (connection.getDestination().equals(this)) {
-            writeLock.lock();
-            try {
-                // update our incoming connections -- we can just remove & re-add the connection to
-                // update the list.
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-                updatedIncoming.remove(connection);
-                updatedIncoming.add(connection);
-                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-            } finally {
-                writeLock.unlock();
-            }
-        }
-    }
-
-    @Override
-    public void removeConnection(final Connection connection) {
-        boolean connectionRemoved = false;
-
-        if (requireNonNull(connection).getSource().equals(this)) {
-            for (final Relationship relationship : connection.getRelationships()) {
-                final Set<Connection> connectionsForRelationship = getConnections(relationship);
-                if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
-                    throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
-                }
-            }
-
-            writeLock.lock();
-            try {
-                for (final Set<Connection> connectionList : this.connections.values()) {
-                    connectionList.remove(connection);
-                }
-
-                connectionRemoved = (destinations.remove(connection) != null);
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (connection.getDestination().equals(this)) {
-            writeLock.lock();
-            try {
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                if (incomingConnections.contains(connection)) {
-                    final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-                    updatedIncoming.remove(connection);
-                    incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-                    return;
-                }
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (!connectionRemoved) {
-            throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
-        }
-    }
-
-    /**
-     * @param relationshipName name
-     * @return the relationship for this node's processor for the given name, or a new relationship created for the given name
-     */
-    @Override
-    public Relationship getRelationship(final String relationshipName) {
-        final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
-        Relationship returnRel = specRel;
-
-        final Set<Relationship> relationships;
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            relationships = processor.getRelationships();
-        }
-
-        for (final Relationship rel : relationships) {
-            if (rel.equals(specRel)) {
-                returnRel = rel;
-                break;
-            }
-        }
-        return returnRel;
-    }
-
-    @Override
-    public Processor getProcessor() {
-        return this.processor;
-    }
-
-    /**
-     * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
-     */
-    public Set<Connectable> getDestinations() {
-        final Set<Connectable> nonSelfDestinations = new HashSet<>();
-        readLock.lock();
-        try {
-            for (final Connectable connectable : destinations.values()) {
-                if (connectable != this) {
-                    nonSelfDestinations.add(connectable);
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-        return nonSelfDestinations;
-    }
-
-    public Set<Connectable> getDestinations(final Relationship relationship) {
-        readLock.lock();
-        try {
-            final Set<Connectable> destinationSet = new HashSet<>();
-            final Set<Connection> relationshipConnections = connections.get(relationship);
-            if (relationshipConnections != null) {
-                for (final Connection connection : relationshipConnections) {
-                    destinationSet.add(destinations.get(connection));
-                }
-            }
-            return destinationSet;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    public Set<Relationship> getUndefinedRelationships() {
-        final Set<Relationship> undefined = new HashSet<>();
-        readLock.lock();
-        try {
-            final Set<Relationship> relationships;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                relationships = processor.getRelationships();
-            }
-
-            if (relationships == null) {
-                return undefined;
-            }
-            for (final Relationship relation : relationships) {
-                final Set<Connection> connectionSet = this.connections.get(relation);
-                if (connectionSet == null || connectionSet.isEmpty()) {
-                    undefined.add(relation);
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-        return undefined;
-    }
-
-    /**
-     * Determines if the given node is a destination for this node
-     *
-     * @param node node
-     * @return true if is a direct destination node; false otherwise
-     */
-    boolean isRelated(final ProcessorNode node) {
-        readLock.lock();
-        try {
-            return this.destinations.containsValue(node);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isRunning() {
-        readLock.lock();
-        try {
-            return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public int getActiveThreadCount() {
-        readLock.lock();
-        try {
-            return processScheduler.getActiveThreadCount(this);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isValid() {
-        readLock.lock();
-        try {
-            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-            final Collection<ValidationResult> validationResults;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                validationResults = getProcessor().validate(validationContext);
-            }
-
-            for (final ValidationResult result : validationResults) {
-                if (!result.isValid()) {
-                    return false;
-                }
-            }
-
-            for (final Relationship undef : getUndefinedRelationships()) {
-                if (!isAutoTerminated(undef)) {
-                    return false;
-                }
-            }
-        } catch (final Throwable t) {
-            return false;
-        } finally {
-            readLock.unlock();
-        }
-
-        return true;
-    }
-
-    @Override
-    public Collection<ValidationResult> getValidationErrors() {
-        final List<ValidationResult> results = new ArrayList<>();
-        readLock.lock();
-        try {
-            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-            final Collection<ValidationResult> validationResults;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                validationResults = getProcessor().validate(validationContext);
-            }
-
-            for (final ValidationResult result : validationResults) {
-                if (!result.isValid()) {
-                    results.add(result);
-                }
-            }
-
-            for (final Relationship relationship : getUndefinedRelationships()) {
-                if (!isAutoTerminated(relationship)) {
-                    final ValidationResult error = new ValidationResult.Builder()
-                            .explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
-                            .subject("Relationship " + relationship.getName())
-                            .valid(false)
-                            .build();
-                    results.add(error);
-                }
-            }
-        } catch (final Throwable t) {
-            results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
-        } finally {
-            readLock.unlock();
-        }
-        return results;
-    }
-
-    /**
-     * Establishes node equality (based on the processor's identifier)
-     *
-     * @param other node
-     * @return true if equal
-     */
-    @Override
-    public boolean equals(final Object other) {
-        if (!(other instanceof ProcessorNode)) {
-            return false;
-        }
-        final ProcessorNode on = (ProcessorNode) other;
-        return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
-    }
-
-    @Override
-    public int hashCode() {
-        return new HashCodeBuilder(7, 67).append(identifier).toHashCode();
-    }
-
-    @Override
-    public Collection<Relationship> getRelationships() {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            return getProcessor().getRelationships();
-        }
-    }
-
-    @Override
-    public String toString() {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            return getProcessor().toString();
-        }
-    }
-
-    @Override
-    public ProcessGroup getProcessGroup() {
-        return processGroup.get();
-    }
-
-    @Override
-    public void setProcessGroup(final ProcessGroup group) {
-        writeLock.lock();
-        try {
-            this.processGroup.set(group);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            processor.onTrigger(context, sessionFactory);
-        }
-    }
-
-    @Override
-    public ConnectableType getConnectableType() {
-        return ConnectableType.PROCESSOR;
-    }
-
-    @Override
-    public void setScheduledState(final ScheduledState scheduledState) {
-        this.scheduledState.set(scheduledState);
-        if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
-            yieldExpiration.set(0L);
-        }
-    }
-
-    @Override
-    public void setAnnotationData(final String data) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot set AnnotationData while processor is running");
-            }
-
-            this.annotationData.set(data);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public String getAnnotationData() {
-        return annotationData.get();
-    }
-
-    @Override
-    public Collection<ValidationResult> validate(final ValidationContext validationContext) {
-        return processor.validate(validationContext);
-    }
-
-    @Override
-    public void verifyCanDelete() throws IllegalStateException {
-        verifyCanDelete(false);
-    }
-
-    @Override
-    public void verifyCanDelete(final boolean ignoreConnections) {
-        readLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException(this + " is running");
-            }
-
-            if (!ignoreConnections) {
-                for (final Set<Connection> connectionSet : connections.values()) {
-                    for (final Connection connection : connectionSet) {
-                        connection.verifyCanDelete();
-                    }
-                }
-
-                for (final Connection connection : incomingConnectionsRef.get()) {
-                    if (connection.getSource().equals(this)) {
-                        connection.verifyCanDelete();
-                    } else {
-                        throw new IllegalStateException(this + " is the destination of another component");
-                    }
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanStart() {
-        readLock.lock();
-        try {
-            switch (getScheduledState()) {
-                case DISABLED:
-                    throw new IllegalStateException(this + " cannot be started because it is disabled");
-                case RUNNING:
-                    throw new IllegalStateException(this + " cannot be started because it is already running");
-                case STOPPED:
-                    break;
-            }
-            verifyNoActiveThreads();
-
-            if (!isValid()) {
-                throw new IllegalStateException(this + " is not in a valid state");
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
-        switch (getScheduledState()) {
-            case DISABLED:
-                throw new IllegalStateException(this + " cannot be started because it is disabled");
-            case RUNNING:
-                throw new IllegalStateException(this + " cannot be started because it is already running");
-            case STOPPED:
-                break;
-        }
-        verifyNoActiveThreads();
-
-        final Set<String> ids = new HashSet<>();
-        for (final ControllerServiceNode node : ignoredReferences) {
-            ids.add(node.getIdentifier());
-        }
-
-        final Collection<ValidationResult> validationResults = getValidationErrors(ids);
-        for (final ValidationResult result : validationResults) {
-            if (!result.isValid()) {
-                throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
-            }
-        }
-    }
-
-    @Override
-    public void verifyCanStop() {
-        if (getScheduledState() != ScheduledState.RUNNING) {
-            throw new IllegalStateException(this + " is not scheduled to run");
-        }
-    }
-
-    @Override
-    public void verifyCanUpdate() {
-        readLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException(this + " is not stopped");
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanEnable() {
-        readLock.lock();
-        try {
-            if (getScheduledState() != ScheduledState.DISABLED) {
-                throw new IllegalStateException(this + " is not disabled");
-            }
-
-            verifyNoActiveThreads();
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanDisable() {
-        readLock.lock();
-        try {
-            if (getScheduledState() != ScheduledState.STOPPED) {
-                throw new IllegalStateException(this + " is not stopped");
-            }
-            verifyNoActiveThreads();
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    private void verifyNoActiveThreads() throws IllegalStateException {
-        final int threadCount = processScheduler.getActiveThreadCount(this);
-        if (threadCount > 0) {
-            throw new IllegalStateException(this + " has " + threadCount + " threads still active");
-        }
-    }
-
-    @Override
-    public void verifyModifiable() throws IllegalStateException {
-        if (isRunning()) {
-            throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-        }
-    }
+	public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
+
+	public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
+	public static final String DEFAULT_YIELD_PERIOD = "1 sec";
+	public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
+	private final AtomicReference<ProcessGroup> processGroup;
+	private final Processor processor;
+	private final AtomicReference<String> identifier;
+	private final Map<Connection, Connectable> destinations;
+	private final Map<Relationship, Set<Connection>> connections;
+	private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
+	private final AtomicReference<List<Connection>> incomingConnectionsRef;
+	private final ReentrantReadWriteLock rwLock;
+	private final Lock readLock;
+	private final Lock writeLock;
+	private final AtomicBoolean isolated;
+	private final AtomicBoolean lossTolerant;
+	private final AtomicReference<ScheduledState> scheduledState;
+	private final AtomicReference<String> comments;
+	private final AtomicReference<String> name;
+	private final AtomicReference<Position> position;
+	private final AtomicReference<String> annotationData;
+	private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
+	private final AtomicReference<String> yieldPeriod;
+	private final AtomicReference<String> penalizationPeriod;
+	private final AtomicReference<Map<String, String>> style;
+	private final AtomicInteger concurrentTaskCount;
+	private final AtomicLong yieldExpiration;
+	private final AtomicLong schedulingNanos;
+	private final boolean triggerWhenEmpty;
+	private final boolean sideEffectFree;
+	private final boolean triggeredSerially;
+	private final boolean triggerWhenAnyDestinationAvailable;
+	private final boolean eventDrivenSupported;
+	private final boolean batchSupported;
+	private final Requirement inputRequirement;
+	private final ValidationContextFactory validationContextFactory;
+	private final ProcessScheduler processScheduler;
+	private long runNanos = 0L;
+
+	private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
+
+	@SuppressWarnings("deprecation")
+	public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
+		final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
+		super(processor, uuid, validationContextFactory, controllerServiceProvider);
+
+		this.processor = processor;
+		identifier = new AtomicReference<>(uuid);
+		destinations = new HashMap<>();
+		connections = new HashMap<>();
+		incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
+		scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
+		rwLock = new ReentrantReadWriteLock(false);
+		readLock = rwLock.readLock();
+		writeLock = rwLock.writeLock();
+		lossTolerant = new AtomicBoolean(false);
+		final Set<Relationship> emptySetOfRelationships = new HashSet<>();
+		undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
+		comments = new AtomicReference<>("");
+		name = new AtomicReference<>(processor.getClass().getSimpleName());
+		schedulingPeriod = new AtomicReference<>("0 sec");
+		schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
+		yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
+		yieldExpiration = new AtomicLong(0L);
+		concurrentTaskCount = new AtomicInteger(1);
+		position = new AtomicReference<>(new Position(0D, 0D));
+		style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
+		this.processGroup = new AtomicReference<>();
+		processScheduler = scheduler;
+		annotationData = new AtomicReference<>();
+		isolated = new AtomicBoolean(false);
+		penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
+
+		final Class<?> procClass = processor.getClass();
+		triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
+		sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
+		batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
+		triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
+		triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
+			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
+		this.validationContextFactory = validationContextFactory;
+		eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
+			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
+
+		final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
+		if (inputRequirementPresent) {
+			inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
+		} else {
+			inputRequirement = Requirement.INPUT_ALLOWED;
+		}
+
+		schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
+	}
+
+	/**
+	 * @return comments about this specific processor instance
+	 */
+	@Override
+	public String getComments() {
+		return comments.get();
+	}
+
+	/**
+	 * Provides an opportunity to retain information about this particular processor instance
+	 *
+	 * @param comments new comments
+	 */
+	@Override
+	public void setComments(final String comments) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.comments.set(comments);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public ScheduledState getScheduledState() {
+		return scheduledState.get();
+	}
+
+	@Override
+	public Position getPosition() {
+		return position.get();
+	}
+
+	@Override
+	public void setPosition(Position position) {
+		this.position.set(position);
+	}
+
+	@Override
+	public Map<String, String> getStyle() {
+		return style.get();
+	}
+
+	@Override
+	public void setStyle(final Map<String, String> style) {
+		if (style != null) {
+			this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
+		}
+	}
+
+	@Override
+	public String getIdentifier() {
+		return identifier.get();
+	}
+
+	/**
+	 * @return true if flow file content generated by this processor is considered loss tolerant
+	 */
+	@Override
+	public boolean isLossTolerant() {
+		return lossTolerant.get();
+	}
+
+	@Override
+	public boolean isIsolated() {
+		return isolated.get();
+	}
+
+	/**
+	 * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isTriggerWhenEmpty() {
+		return triggerWhenEmpty;
+	}
+
+	/**
+	 * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isSideEffectFree() {
+		return sideEffectFree;
+	}
+
+	@Override
+	public boolean isHighThroughputSupported() {
+		return batchSupported;
+	}
+
+	/**
+	 * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isTriggerWhenAnyDestinationAvailable() {
+		return triggerWhenAnyDestinationAvailable;
+	}
+
+	/**
+	 * Indicates whether flow file content made by this processor must be persisted
+	 *
+	 * @param lossTolerant tolerant
+	 */
+	@Override
+	public void setLossTolerant(final boolean lossTolerant) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.lossTolerant.set(lossTolerant);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Indicates whether the processor runs on only the primary node.
+	 *
+	 * @param isolated isolated
+	 */
+	public void setIsolated(final boolean isolated) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.isolated.set(isolated);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isAutoTerminated(final Relationship relationship) {
+		final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
+		if (terminatable == null) {
+			return false;
+		}
+		return terminatable.contains(relationship);
+	}
+
+	@Override
+	public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+
+			for (final Relationship rel : terminate) {
+				if (!getConnections(rel).isEmpty()) {
+					throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
+				}
+			}
+			undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @return an unmodifiable Set that contains all of the Relationship objects that are configured to be auto-terminated
+	 */
+	@Override
+	public Set<Relationship> getAutoTerminatedRelationships() {
+		Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
+		if (relationships == null) {
+			relationships = new HashSet<>();
+		}
+		return Collections.unmodifiableSet(relationships);
+	}
+
+	@Override
+	public String getName() {
+		return name.get();
+	}
+
+	/**
+	 * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
+	 */
+	@SuppressWarnings("deprecation")
+	public String getProcessorDescription() {
+		CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
+		String description = null;
+		if (capDesc != null) {
+			description = capDesc.value();
+		} else {
+			final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
+			= processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
+			if (deprecatedCapDesc != null) {
+				description = deprecatedCapDesc.value();
+			}
+		}
+
+		return description;
+	}
+
+	@Override
+	public void setName(final String name) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.name.set(name);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @param timeUnit determines the unit of time to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
+	 * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
+	 */
+	@Override
+	public long getSchedulingPeriod(final TimeUnit timeUnit) {
+		return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
+	}
+
+	@Override
+	public boolean isEventDrivenSupported() {
+		readLock.lock();
+		try {
+			return this.eventDrivenSupported;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	/**
+	 * Updates the Scheduling Strategy used for this Processor
+	 *
+	 * @param schedulingStrategy strategy
+	 *
+	 * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
+	 */
+	@Override
+	public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
+		writeLock.lock();
+		try {
+			if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
+				// not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
+				// it no longer supports EventDriven mode, we don't want the app to fail to startup if it was already in Event-Driven
+				// Mode. Instead, we will simply leave it in Timer-Driven mode
+				return;
+			}
+
+			this.schedulingStrategy = schedulingStrategy;
+			setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @return the currently configured scheduling strategy
+	 */
+	@Override
+	public SchedulingStrategy getSchedulingStrategy() {
+		readLock.lock();
+		try {
+			return this.schedulingStrategy;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public String getSchedulingPeriod() {
+		return schedulingPeriod.get();
+	}
+
+	@Override
+	public void setScheduldingPeriod(final String schedulingPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+
+			switch (schedulingStrategy) {
+				case CRON_DRIVEN: {
+					try {
+						new CronExpression(schedulingPeriod);
+					} catch (final Exception e) {
+						throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
+					}
+				}
+				break;
+				case PRIMARY_NODE_ONLY:
+				case TIMER_DRIVEN: {
+					final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
+					if (schedulingNanos < 0) {
+						throw new IllegalArgumentException("Scheduling Period must be positive");
+					}
+					this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
+				}
+				break;
+				case EVENT_DRIVEN:
+				default:
+					return;
+			}
+
+			this.schedulingPeriod.set(schedulingPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public long getRunDuration(final TimeUnit timeUnit) {
+		readLock.lock();
+		try {
+			return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void setRunDuration(final long duration, final TimeUnit timeUnit) {
+		writeLock.lock();
+		try {
+			if (duration < 0) {
+				throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
+			}
+
+			this.runNanos = timeUnit.toNanos(duration);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public long getYieldPeriod(final TimeUnit timeUnit) {
+		return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+	}
+
+	@Override
+	public String getYieldPeriod() {
+		return yieldPeriod.get();
+	}
+
+	@Override
+	public void setYieldPeriod(final String yieldPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
+			if (yieldMillis < 0) {
+				throw new IllegalArgumentException("Yield duration must be positive");
+			}
+			this.yieldPeriod.set(yieldPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
+	 * methods.
+	 */
+	@Override
+	public void yield() {
+		final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
+		yield(yieldMillis, TimeUnit.MILLISECONDS);
+
+		final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
+		LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
+	}
+
+	@Override
+	public void yield(final long period, final TimeUnit timeUnit) {
+		final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
+		yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
+
+		processScheduler.yield(this);
+	}
+
+	/**
+	 * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
+	 */
+	@Override
+	public long getYieldExpiration() {
+		return yieldExpiration.get();
+	}
+
+	@Override
+	public long getPenalizationPeriod(final TimeUnit timeUnit) {
+		return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+	}
+
+	@Override
+	public String getPenalizationPeriod() {
+		return penalizationPeriod.get();
+	}
+
+	@Override
+	public void setPenalizationPeriod(final String penalizationPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
+			if (penalizationMillis < 0) {
+				throw new IllegalArgumentException("Penalization duration must be positive");
+			}
+			this.penalizationPeriod.set(penalizationPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Determines the number of concurrent tasks that may be running for this processor.
+	 *
+	 * @param taskCount a number of concurrent tasks this processor may have running
+	 * @throws IllegalArgumentException if the given value is less than 1
+	 */
+	@Override
+	public void setMaxConcurrentTasks(final int taskCount) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
+				throw new IllegalArgumentException();
+			}
+			if (!triggeredSerially) {
+				concurrentTaskCount.set(taskCount);
+			}
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isTriggeredSerially() {
+		return triggeredSerially;
+	}
+
+	/**
+	 * @return the number of tasks that may execute concurrently for this processor
+	 */
+	@Override
+	public int getMaxConcurrentTasks() {
+		return concurrentTaskCount.get();
+	}
+
+	@Override
+	public LogLevel getBulletinLevel() {
+		return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
+	}
+
+	@Override
+	public void setBulletinLevel(final LogLevel level) {
+		LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
+	}
+
+	@Override
+	public Set<Connection> getConnections() {
+		final Set<Connection> allConnections = new HashSet<>();
+		readLock.lock();
+		try {
+			for (final Set<Connection> connectionSet : connections.values()) {
+				allConnections.addAll(connectionSet);
+			}
+		} finally {
+			readLock.unlock();
+		}
+
+		return allConnections;
+	}
+
+	@Override
+	public List<Connection> getIncomingConnections() {
+		return incomingConnectionsRef.get();
+	}
+
+	@Override
+	public Set<Connection> getConnections(final Relationship relationship) {
+		final Set<Connection> applicableConnections;
+		readLock.lock();
+		try {
+			applicableConnections = connections.get(relationship);
+		} finally {
+			readLock.unlock();
+		}
+		return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
+	}
+
+	@Override
+	public void addConnection(final Connection connection) {
+		Objects.requireNonNull(connection, "connection cannot be null");
+
+		if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
+			throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
+		}
+
+		writeLock.lock();
+		try {
+			List<Connection> updatedIncoming = null;
+			if (connection.getDestination().equals(this)) {
+				// don't add the connection twice. This may occur if we have a self-loop because we will be told
+				// to add the connection once because we are the source and again because we are the destination.
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				updatedIncoming = new ArrayList<>(incomingConnections);
+				if (!updatedIncoming.contains(connection)) {
+					updatedIncoming.add(connection);
+				}
+			}
+
+			if (connection.getSource().equals(this)) {
+				// don't add the connection twice. This may occur if we have a self-loop because we will be told
+				// to add the connection once because we are the source and again because we are the destination.
+				if (!destinations.containsKey(connection)) {
+					for (final Relationship relationship : connection.getRelationships()) {
+						final Relationship rel = getRelationship(relationship.getName());
+						Set<Connection> set = connections.get(rel);
+						if (set == null) {
+							set = new HashSet<>();
+							connections.put(rel, set);
+						}
+
+						set.add(connection);
+
+						destinations.put(connection, connection.getDestination());
+					}
+
+					final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+					if (autoTerminated != null) {
+						autoTerminated.removeAll(connection.getRelationships());
+						this.undefinedRelationshipsToTerminate.set(autoTerminated);
+					}
+				}
+			}
+
+			if (updatedIncoming != null) {
+				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+			}
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean hasIncomingConnection() {
+		return !incomingConnectionsRef.get().isEmpty();
+	}
+
+	@Override
+	public void updateConnection(final Connection connection) throws IllegalStateException {
+		if (requireNonNull(connection).getSource().equals(this)) {
+			writeLock.lock();
+			try {
+				//
+				// update any relationships
+				//
+				// first check if any relations were removed.
+				final List<Relationship> existingRelationships = new ArrayList<>();
+				for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
+					if (entry.getValue().contains(connection)) {
+						existingRelationships.add(entry.getKey());
+					}
+				}
+
+				for (final Relationship rel : connection.getRelationships()) {
+					if (!existingRelationships.contains(rel)) {
+						// relationship was removed. Check if this is legal.
+						final Set<Connection> connectionsForRelationship = getConnections(rel);
+						if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
+							// if we are running and we do not terminate undefined relationships and this is the only
+							// connection that defines the given relationship, and that relationship is required,
+							// then it is not legal to remove this relationship from this connection.
+							throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
+								+ this + ", which is currently running");
+						}
+					}
+				}
+
+				// remove the connection from any list that currently contains
+				for (final Set<Connection> list : connections.values()) {
+					list.remove(connection);
+				}
+
+				// add the connection in for all relationships listed.
+				for (final Relationship rel : connection.getRelationships()) {
+					Set<Connection> set = connections.get(rel);
+					if (set == null) {
+						set = new HashSet<>();
+						connections.put(rel, set);
+					}
+					set.add(connection);
+				}
+
+				// update to the new destination
+				destinations.put(connection, connection.getDestination());
+
+				final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+				if (autoTerminated != null) {
+					autoTerminated.removeAll(connection.getRelationships());
+					this.undefinedRelationshipsToTerminate.set(autoTerminated);
+				}
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (connection.getDestination().equals(this)) {
+			writeLock.lock();
+			try {
+				// update our incoming connections -- we can just remove & re-add the connection to
+				// update the list.
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+				updatedIncoming.remove(connection);
+				updatedIncoming.add(connection);
+				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+			} finally {
+				writeLock.unlock();
+			}
+		}
+	}
+
+	@Override
+	public void removeConnection(final Connection connection) {
+		boolean connectionRemoved = false;
+
+		if (requireNonNull(connection).getSource().equals(this)) {
+			for (final Relationship relationship : connection.getRelationships()) {
+				final Set<Connection> connectionsForRelationship = getConnections(relationship);
+				if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
+					throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
+				}
+			}
+
+			writeLock.lock();
+			try {
+				for (final Set<Connection> connectionList : this.connections.values()) {
+					connectionList.remove(connection);
+				}
+
+				connectionRemoved = (destinations.remove(connection) != null);
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (connection.getDestination().equals(this)) {
+			writeLock.lock();
+			try {
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				if (incomingConnections.contains(connection)) {
+					final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+					updatedIncoming.remove(connection);
+					incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+					return;
+				}
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (!connectionRemoved) {
+			throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
+		}
+	}
+
+	/**
+	 * @param relationshipName name
+	 * @return the relationship defined by this node's processor for the given name, or a new relationship created for the given name
+	 */
+	@Override
+	public Relationship getRelationship(final String relationshipName) {
+		final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
+		Relationship returnRel = specRel;
+
+		final Set<Relationship> relationships;
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			relationships = processor.getRelationships();
+		}
+
+		for (final Relationship rel : relationships) {
+			if (rel.equals(specRel)) {
+				returnRel = rel;
+				break;
+			}
+		}
+		return returnRel;
+	}
+
+	@Override
+	public Processor getProcessor() {
+		return this.processor;
+	}
+
+	/**
+	 * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
+	 */
+	public Set<Connectable> getDestinations() {
+		final Set<Connectable> nonSelfDestinations = new HashSet<>();
+		readLock.lock();
+		try {
+			for (final Connectable connectable : destinations.values()) {
+				if (connectable != this) {
+					nonSelfDestinations.add(connectable);
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+		return nonSelfDestinations;
+	}
+
+	public Set<Connectable> getDestinations(final Relationship relationship) {
+		readLock.lock();
+		try {
+			final Set<Connectable> destinationSet = new HashSet<>();
+			final Set<Connection> relationshipConnections = connections.get(relationship);
+			if (relationshipConnections != null) {
+				for (final Connection connection : relationshipConnections) {
+					destinationSet.add(destinations.get(connection));
+				}
+			}
+			return destinationSet;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	public Set<Relationship> getUndefinedRelationships() {
+		final Set<Relationship> undefined = new HashSet<>();
+		readLock.lock();
+		try {
+			final Set<Relationship> relationships;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				relationships = processor.getRelationships();
+			}
+
+			if (relationships == null) {
+				return undefined;
+			}
+			for (final Relationship relation : relationships) {
+				final Set<Connection> connectionSet = this.connections.get(relation);
+				if (connectionSet == null || connectionSet.isEmpty()) {
+					undefined.add(relation);
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+		return undefined;
+	}
+
+	/**
+	 * Determines if the given node is a destination for this node
+	 *
+	 * @param node node
+	 * @return true if is a direct destination node; false otherwise
+	 */
+	boolean isRelated(final ProcessorNode node) {
+		readLock.lock();
+		try {
+			return this.destinations.containsValue(node);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isRunning() {
+		readLock.lock();
+		try {
+			return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public int getActiveThreadCount() {
+		readLock.lock();
+		try {
+			return processScheduler.getActiveThreadCount(this);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isValid() {
+		readLock.lock();
+		try {
+			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+			final Collection<ValidationResult> validationResults;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				validationResults = getProcessor().validate(validationContext);
+			}
+
+			for (final ValidationResult result : validationResults) {
+				if (!result.isValid()) {
+					return false;
+				}
+			}
+
+			for (final Relationship undef : getUndefinedRelationships()) {
+				if (!isAutoTerminated(undef)) {
+					return false;
+				}
+			}
+
+			switch (getInputRequirement()) {
+				case INPUT_ALLOWED:
+					break;
+				case INPUT_FORBIDDEN: {
+					if (!getIncomingConnections().isEmpty()) {
+						return false;
+					}
+					break;
+				}
+				case INPUT_REQUIRED: {
+					if (getIncomingConnections().isEmpty()) {
+						return false;
+					}
+					break;
+				}
+			}
+		} catch (final Throwable t) {
+			return false;
+		} finally {
+			readLock.unlock();
+		}
+
+		return true;
+	}
+
+	@Override
+	public Collection<ValidationResult> getValidationErrors() {
+		final List<ValidationResult> results = new ArrayList<>();
+		readLock.lock();
+		try {
+			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+			final Collection<ValidationResult> validationResults;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				validationResults = getProcessor().validate(validationContext);
+			}
+
+			for (final ValidationResult result : validationResults) {
+				if (!result.isValid()) {
+					results.add(result);
+				}
+			}
+
+			for (final Relationship relationship : getUndefinedRelationships()) {
+				if (!isAutoTerminated(relationship)) {
+					final ValidationResult error = new ValidationResult.Builder()
+						.explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
+						.subject("Relationship " + relationship.getName())
+						.valid(false)
+						.build();
+					results.add(error);
+				}
+			}
+
+			switch (getInputRequirement()) {
+				case INPUT_ALLOWED:
+					break;
+				case INPUT_FORBIDDEN: {
+					final int incomingConnCount = getIncomingConnections().size();
+					if (incomingConnCount != 0) {
+						results.add(new ValidationResult.Builder()
+							.explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
+							.subject("Incoming Connections")
+							.valid(false)
+							.build());
+					}
+					break;
+				}
+				case INPUT_REQUIRED: {
+					if (getIncomingConnections().isEmpty()) {
+						results.add(new ValidationResult.Builder()
+							.explanation("Processor required at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
+							.subject("Incoming Connections")
+							.valid(false)
+							.build());
+					}
+					break;
+				}
+			}
+		} catch (final Throwable t) {
+			results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
+		} finally {
+			readLock.unlock();
+		}
+		return results;
+	}
+
+	@Override
+	public Requirement getInputRequirement() {
+		return inputRequirement;
+	}
+
+	/**
+	 * Establishes node equality (based on the processor's identifier)
+	 *
+	 * @param other node
+	 * @return true if equal
+	 */
+	@Override
+	public boolean equals(final Object other) {
+		if (!(other instanceof ProcessorNode)) {
+			return false;
+		}
+		final ProcessorNode on = (ProcessorNode) other;
+		return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
+	}
+
+	@Override
+	public int hashCode() {
+		return new HashCodeBuilder(7, 67).append(identifier).toHashCode();
+	}
+
+	@Override
+	public Collection<Relationship> getRelationships() {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			return getProcessor().getRelationships();
+		}
+	}
+
+	@Override
+	public String toString() {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			return getProcessor().toString();
+		}
+	}
+
+	@Override
+	public ProcessGroup getProcessGroup() {
+		return processGroup.get();
+	}
+
+	@Override
+	public void setProcessGroup(final ProcessGroup group) {
+		writeLock.lock();
+		try {
+			this.processGroup.set(group);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			processor.onTrigger(context, sessionFactory);
+		}
+	}
+
+	@Override
+	public ConnectableType getConnectableType() {
+		return ConnectableType.PROCESSOR;
+	}
+
+	@Override
+	public void setScheduledState(final ScheduledState scheduledState) {
+		this.scheduledState.set(scheduledState);
+		if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
+			yieldExpiration.set(0L);
+		}
+	}
+
+	@Override
+	public void setAnnotationData(final String data) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot set AnnotationData while processor is running");
+			}
+
+			this.annotationData.set(data);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public String getAnnotationData() {
+		return annotationData.get();
+	}
+
+	@Override
+	public Collection<ValidationResult> validate(final ValidationContext validationContext) {
+		return getValidationErrors();
+	}
+
+	@Override
+	public void verifyCanDelete() throws IllegalStateException {
+		verifyCanDelete(false);
+	}
+
+	@Override
+	public void verifyCanDelete(final boolean ignoreConnections) {
+		readLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException(this + " is running");
+			}
+
+			if (!ignoreConnections) {
+				for (final Set<Connection> connectionSet : connections.values()) {
+					for (final Connection connection : connectionSet) {
+						connection.verifyCanDelete();
+					}
+				}
+
+				for (final Connection connection : incomingConnectionsRef.get()) {
+					if (connection.getSource().equals(this)) {
+						connection.verifyCanDelete();
+					} else {
+						throw new IllegalStateException(this + " is the destination of another component");
+					}
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanStart() {
+		readLock.lock();
+		try {
+			switch (getScheduledState()) {
+				case DISABLED:
+					throw new IllegalStateException(this + " cannot be started because it is disabled");
+				case RUNNING:
+					throw new IllegalStateException(this + " cannot be started because it is already running");
+				case STOPPED:
+					break;
+			}
+			verifyNoActiveThreads();
+
+			if (!isValid()) {
+				throw new IllegalStateException(this + " is not in a valid state");
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
+		switch (getScheduledState()) {
+			case DISABLED:
+				throw new IllegalStateException(this + " cannot be started because it is disabled");
+			case RUNNING:
+				throw new IllegalStateException(this + " cannot be started because it is already running");
+			case STOPPED:
+				break;
+		}
+		verifyNoActiveThreads();
+
+		final Set<String> ids = new HashSet<>();
+		for (final ControllerServiceNode node : ignoredReferences) {
+			ids.add(node.getIdentifier());
+		}
+
+		final Collection<ValidationResult> validationResults = getValidationErrors(ids);
+		for (final ValidationResult result : validationResults) {
+			if (!result.isValid()) {
+				throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
+			}
+		}
+	}
+
+	@Override
+	public void verifyCanStop() {
+		if (getScheduledState() != ScheduledState.RUNNING) {
+			throw new IllegalStateException(this + " is not scheduled to run");
+		}
+	}
+
+	@Override
+	public void verifyCanUpdate() {
+		readLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException(this + " is not stopped");
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanEnable() {
+		readLock.lock();
+		try {
+			if (getScheduledState() != ScheduledState.DISABLED) {
+				throw new IllegalStateException(this + " is not disabled");
+			}
+
+			verifyNoActiveThreads();
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanDisable() {
+		readLock.lock();
+		try {
+			if (getScheduledState() != ScheduledState.STOPPED) {
+				throw new IllegalStateException(this + " is not stopped");
+			}
+			verifyNoActiveThreads();
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	private void verifyNoActiveThreads() throws IllegalStateException {
+		final int threadCount = processScheduler.getActiveThreadCount(this);
+		if (threadCount > 0) {
+			throw new IllegalStateException(this + " has " + threadCount + " threads still active");
+		}
+	}
+
+	@Override
+	public void verifyModifiable() throws IllegalStateException {
+		if (isRunning()) {
+			throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+		}
+	}
 }

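For readers following the NIFI-810 change: everything above reduces to the framework reading one annotation off the processor class and cross-checking it against the flow's wiring at validation time. A minimal sketch of how a processor author opts in (the class below is hypothetical, written only to illustrate the annotation; it is not part of this commit):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.exception.ProcessException;

    // Hypothetical source-style processor: it only produces FlowFiles, so any
    // incoming Connection is a configuration error under the new check.
    @InputRequirement(Requirement.INPUT_FORBIDDEN)
    public class ExampleGenerateProcessor extends AbstractProcessor {
        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            // generate and transfer FlowFiles here; this processor never reads input
        }
    }

With INPUT_FORBIDDEN declared, the isValid()/getValidationErrors() logic above marks the node invalid as soon as an incoming Connection exists; INPUT_REQUIRED inverts the check, and INPUT_ALLOWED (the default when the annotation is absent) preserves the pre-NIFI-810 behavior.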
http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java b/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
index b766878..eccff79 100644
--- a/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
+++ b/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
@@ -31,6 +31,8 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -59,6 +61,7 @@ import com.maxmind.geoip2.record.Subdivision;
 @SideEffectFree
 @SupportsBatching
 @Tags({"geo", "enrich", "ip", "maxmind"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Looks up geolocation information for an IP address and adds the geo information to FlowFile attributes. The "
         + "geo data is provided as a MaxMind database. The attribute that contains the IP address to lookup is provided by the "
         + "'IP Address Attribute' property. If the name of the attribute provided is 'X', then the the attributes added by enrichment "

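Because GeoEnrichIP now declares INPUT_REQUIRED, an instance with no incoming Connection becomes invalid instead of sitting idle. A rough sketch of surfacing that error from a ProcessorNode (the helper class is illustrative only; getValidationErrors() is the method shown in StandardProcessorNode above):

    import java.util.Collection;

    import org.apache.nifi.components.ValidationResult;
    import org.apache.nifi.controller.ProcessorNode;

    // Illustrative helper: prints why a node (e.g. a GeoEnrichIP instance with no
    // incoming Connection) fails validation under the new InputRequirement check.
    public final class ValidationReport {
        private ValidationReport() {
        }

        public static void print(final ProcessorNode node) {
            final Collection<ValidationResult> errors = node.getValidationErrors();
            for (final ValidationResult result : errors) {
                System.out.println(result.getSubject() + ": " + result.getExplanation());
            }
        }
    }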

[15/19] nifi git commit: NIFI-810: rebased from master

Posted by ma...@apache.org.
NIFI-810: rebased from master


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/b974445d
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/b974445d
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/b974445d

Branch: refs/heads/master
Commit: b974445ddd38ec7e84995225b86987e6af1af52c
Parents: 5ecdb18 2215bc8
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Oct 7 17:28:39 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:28:39 2015 -0400

----------------------------------------------------------------------
 .../nifi/controller/StandardProcessorNode.java    | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)
----------------------------------------------------------------------



[09/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
index 68155d1..98a56bf 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
@@ -16,33 +16,7 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.ProcessorInitializationContext;
-import org.apache.nifi.processor.DataUnit;
-import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.Relationship;
 import java.io.BufferedWriter;
-
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.Validator;
-import org.apache.nifi.expression.AttributeValueDecorator;
-import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
-import org.apache.nifi.logging.ProcessorLog;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.io.OutputStreamCallback;
-import org.apache.nifi.processor.io.StreamCallback;
-import org.apache.nifi.processor.util.FlowFileFilters;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.processors.standard.util.NLKBufferedReader;
-import org.apache.nifi.util.StopWatch;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -58,9 +32,37 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.Validator;
+import org.apache.nifi.expression.AttributeValueDecorator;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.logging.ProcessorLog;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.DataUnit;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.OutputStreamCallback;
+import org.apache.nifi.processor.io.StreamCallback;
+import org.apache.nifi.processor.util.FlowFileFilters;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.standard.util.NLKBufferedReader;
+import org.apache.nifi.stream.io.StreamUtils;
+import org.apache.nifi.util.StopWatch;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Text", "Regular Expression", "Update", "Change", "Replace", "Modify", "Regex"})
 @CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of "
         + "the content that matches the Regular Expression with some alternate value.")

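The same INPUT_REQUIRED declaration repeats across the transform and routing processors in this series, while the framework-side resolution stays a small reflective lookup with a permissive default. Restated here as a standalone sketch for clarity (equivalent to the constructor logic in StandardProcessorNode above, not new code from this commit):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

    final class InputRequirementResolver {
        private InputRequirementResolver() {
        }

        // An absent annotation resolves to INPUT_ALLOWED, keeping legacy processors valid.
        static Requirement resolve(final Class<?> procClass) {
            final InputRequirement annotation = procClass.getAnnotation(InputRequirement.class);
            return (annotation == null) ? Requirement.INPUT_ALLOWED : annotation.value();
        }
    }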
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
index 04a9c56..f68ac6c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
@@ -39,12 +39,19 @@ import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.expression.AttributeValueDecorator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
@@ -52,21 +59,16 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.StopWatch;
 
-import org.apache.commons.lang3.StringUtils;
-
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Text", "Regular Expression", "Update", "Change", "Replace", "Modify", "Regex", "Mapping"})
 @CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of the content that "
         + "matches the Regular Expression with some alternate value provided in a mapping file.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
index 7055a8a..d681793 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
@@ -29,6 +29,8 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.DynamicRelationship;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -59,6 +61,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"attributes", "routing", "Attribute Expression Language", "regexp", "regex", "Regular Expression", "Expression Language"})
 @CapabilityDescription("Routes FlowFiles based on their Attributes using the Attribute Expression Language")
 @DynamicProperty(name = "Relationship Name", value = "Attribute Expression Language", supportsExpressionLanguage = true, description = "Routes FlowFiles whose "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
index 937bc69..c63839c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
@@ -29,10 +29,18 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.DynamicRelationship;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.expression.AttributeValueDecorator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
@@ -40,20 +48,15 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.DynamicRelationship;
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.IntegerHolder;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"route", "content", "regex", "regular expression", "regexp"})
 @CapabilityDescription("Applies Regular Expressions to the content of a FlowFile and routes a copy of the FlowFile to each "
         + "destination whose Regular Expression matches. Regular Expressions are added as User-Defined Properties where the name "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
index 1f0fc7b..aa88827 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
@@ -32,28 +32,31 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.util.file.monitor.LastModifiedMonitor;
-import org.apache.nifi.util.file.monitor.SynchronousFileWatcher;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.util.file.monitor.LastModifiedMonitor;
+import org.apache.nifi.util.file.monitor.SynchronousFileWatcher;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"scan", "attributes", "search", "lookup"})
 @CapabilityDescription("Scans the specified attributes of FlowFiles, checking to see if any of their values are "
         + "present within the specified dictionary of terms")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
index 445249b..6fe8446 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
@@ -35,11 +35,13 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -63,6 +65,7 @@ import org.apache.nifi.util.search.ahocorasick.SearchState;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"aho-corasick", "scan", "content", "byte sequence", "search", "find", "dictionary"})
 @CapabilityDescription("Scans the content of FlowFiles for terms that are found in a user-supplied dictionary. If a term is matched, the UTF-8 "
         + "encoded version of the term will be added to the FlowFile using the 'matching.term' attribute")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
index e5e90ea..7b1103f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
@@ -26,13 +26,15 @@ import java.util.Set;
 import java.util.UUID;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -48,6 +50,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @SideEffectFree
 @SupportsBatching
 @Tags({"segment", "split"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Segments a FlowFile into multiple smaller segments on byte boundaries. Each segment is given the following attributes: "
         + "fragment.identifier, fragment.index, fragment.count, segment.original.filename; these attributes can then be used by the "
         + "MergeContent processor in order to reconstitute the original FlowFile")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
index 3da1bd5..3cdf787 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
@@ -33,14 +33,16 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
@@ -64,6 +66,7 @@ import org.apache.nifi.util.Tuple;
 @SideEffectFree
 @SupportsBatching
 @Tags({"content", "split", "binary"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits incoming FlowFiles by a specified byte sequence")
 @WritesAttributes({
     @WritesAttribute(attribute = "fragment.identifier", description = "All split FlowFiles produced from the same parent FlowFile will have the same randomly generated UUID added for this attribute"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
index a3a4ed8..dfd09a2 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
@@ -16,12 +16,21 @@
  */
 package org.apache.nifi.processors.standard;
 
-import com.jayway.jsonpath.DocumentContext;
-import com.jayway.jsonpath.InvalidJsonException;
-import com.jayway.jsonpath.JsonPath;
-import com.jayway.jsonpath.PathNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -38,21 +47,16 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
+import com.jayway.jsonpath.DocumentContext;
+import com.jayway.jsonpath.InvalidJsonException;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.PathNotFoundException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"json", "split", "jsonpath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a JSON File into multiple, separate FlowFiles for an array element specified by a JsonPath expression. "
         + "Each generated FlowFile is comprised of an element of the specified array and transferred to relationship 'split,' "
         + "with the original file transferred to the 'original' relationship. If the specified JsonPath is not found or "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
index 56bd729..e966880 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
@@ -16,50 +16,53 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
-import org.apache.nifi.stream.io.BufferedInputStream;
-import org.apache.nifi.stream.io.BufferedOutputStream;
-import org.apache.nifi.stream.io.ByteArrayOutputStream;
-import org.apache.nifi.stream.io.ByteCountingInputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.BufferedInputStream;
+import org.apache.nifi.stream.io.BufferedOutputStream;
+import org.apache.nifi.stream.io.ByteArrayOutputStream;
+import org.apache.nifi.stream.io.ByteCountingInputStream;
 import org.apache.nifi.util.IntegerHolder;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.UUID;
-
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"split", "text"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a text file into multiple smaller text files on line boundaries, each having up to a configured number of lines")
 @WritesAttributes({
     @WritesAttribute(attribute = "text.line.count", description = "The number of lines of text from the original FlowFile that were copied to this FlowFile"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
index 617fcbe..a8453bb 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
@@ -29,27 +29,28 @@ import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
 
+import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.BufferedInputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.XmlElementNotifier;
+import org.apache.nifi.stream.io.BufferedInputStream;
 import org.apache.nifi.util.BooleanHolder;
-
-import org.apache.commons.lang3.StringEscapeUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.Attributes;
@@ -63,6 +64,7 @@ import org.xml.sax.XMLReader;
 @SideEffectFree
 @SupportsBatching
 @Tags({"xml", "split"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits an XML File into multiple separate FlowFiles, each comprising a child or descendant of the original root element")
 public class SplitXml extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
index fc4730c..e77dfc6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
@@ -35,6 +35,8 @@ import javax.xml.transform.stream.StreamSource;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -62,6 +64,7 @@ import org.apache.nifi.util.Tuple;
 @SideEffectFree
 @SupportsBatching
 @Tags({"xml", "xslt", "transform"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Applies the provided XSLT file to the flowfile XML payload. A new FlowFile is created "
         + "with transformed content and is routed to the 'success' relationship. If the XSL transform "
         + "fails, the original FlowFile is routed to the 'failure' relationship")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
index ff4d936..e94853b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
@@ -35,14 +35,16 @@ import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -67,6 +69,7 @@ import org.apache.nifi.util.ObjectHolder;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Unpack", "un-merge", "tar", "zip", "archive", "flowfile-stream", "flowfile-stream-v3"})
 @CapabilityDescription("Unpacks the content of FlowFiles that have been packaged with one of several different Packaging Formats, emitting one to many "
         + "FlowFiles for each input FlowFile")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
index d505898..3693590 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
@@ -31,6 +31,14 @@ import javax.xml.validation.Schema;
 import javax.xml.validation.SchemaFactory;
 import javax.xml.validation.Validator;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -39,21 +47,15 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.BooleanHolder;
-
 import org.xml.sax.SAXException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"xml", "schema", "validation", "xsd"})
 @CapabilityDescription("Validates the contents of FlowFiles against a user-specified XML Schema file")
 public class ValidateXml extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
index dd81289..8cf5726 100644
--- a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
+++ b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
@@ -31,9 +31,13 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -57,11 +61,9 @@ import org.apache.nifi.search.Searchable;
 import org.apache.nifi.update.attributes.Action;
 import org.apache.nifi.update.attributes.Condition;
 import org.apache.nifi.update.attributes.Criteria;
-import org.apache.nifi.update.attributes.Rule;
 import org.apache.nifi.update.attributes.FlowFilePolicy;
+import org.apache.nifi.update.attributes.Rule;
 import org.apache.nifi.update.attributes.serde.CriteriaSerDe;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 
 /**
  * This processor supports updating flowfile attributes and can do so
@@ -116,6 +118,7 @@ import org.apache.nifi.annotation.behavior.WritesAttribute;
  */
 @EventDriven
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"attributes", "modification", "update", "delete", "Attribute Expression Language"})
 @CapabilityDescription("Updates the Attributes for a FlowFile by using the Attribute Expression Language and/or deletes the attributes based on a regular expression")
 @DynamicProperty(name = "A FlowFile attribute to update", value = "The value to set it to", supportsExpressionLanguage = true,


[05/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/034ee6de
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/034ee6de
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/034ee6de

Branch: refs/heads/master
Commit: 034ee6de6bc4c6923a835fbeaab4fb05fd694434
Parents: 96764ed
Author: Mark Payne <ma...@hotmail.com>
Authored: Fri Sep 25 11:39:28 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Fri Sep 25 11:39:58 2015 -0400

----------------------------------------------------------------------
 .../annotation/behavior/InputRequirement.java   |   51 +
 .../src/main/asciidoc/developer-guide.adoc      |   11 +
 .../nifi/processors/avro/ConvertAvroToJSON.java |    3 +
 .../processors/avro/ExtractAvroMetadata.java    |   29 +-
 .../apache/nifi/processors/avro/SplitAvro.java  |   27 +-
 .../nifi/processors/aws/s3/FetchS3Object.java   |    3 +
 .../nifi/processors/aws/s3/PutS3Object.java     |    6 +-
 .../apache/nifi/processors/aws/sns/PutSNS.java  |    3 +
 .../nifi/processors/aws/sqs/DeleteSQS.java      |    3 +
 .../apache/nifi/processors/aws/sqs/GetSQS.java  |    5 +-
 .../apache/nifi/processors/aws/sqs/PutSQS.java  |    5 +-
 .../nifi/processors/flume/ExecuteFlumeSink.java |   14 +-
 .../processors/flume/ExecuteFlumeSource.java    |   14 +-
 .../apache/nifi/controller/ProcessorNode.java   |   89 +-
 .../nifi/controller/StandardProcessorNode.java  | 2440 +++++++++---------
 .../org/apache/nifi/processors/GeoEnrichIP.java |    3 +
 .../hadoop/CreateHadoopSequenceFile.java        |    4 +-
 .../nifi/processors/hadoop/FetchHDFS.java       |    3 +
 .../apache/nifi/processors/hadoop/GetHDFS.java  |    3 +
 .../apache/nifi/processors/hadoop/ListHDFS.java |    3 +
 .../apache/nifi/processors/hadoop/PutHDFS.java  |    3 +
 .../processors/hl7/ExtractHL7Attributes.java    |    3 +
 .../apache/nifi/processors/hl7/RouteHL7.java    |    3 +
 .../processors/image/ExtractImageMetadata.java  |   36 +-
 .../nifi/processors/image/ResizeImage.java      |   38 +-
 .../apache/nifi/processors/kafka/GetKafka.java  |   21 +-
 .../apache/nifi/processors/kafka/PutKafka.java  |   10 +-
 .../nifi/processors/kite/ConvertCSVToAvro.java  |   16 +-
 .../nifi/processors/kite/ConvertJSONToAvro.java |   14 +-
 .../processors/kite/StoreInKiteDataset.java     |    9 +-
 .../nifi/processors/yandex/YandexTranslate.java |    3 +
 .../nifi-pcap-processors/.gitignore             |    1 +
 .../nifi/processors/twitter/GetTwitter.java     |    5 +-
 .../apache/nifi/processors/solr/GetSolr.java    |   43 +-
 .../processors/solr/PutSolrContentStream.java   |   33 +-
 .../standard/Base64EncodeContent.java           |  171 +-
 .../processors/standard/CompressContent.java    |   15 +-
 .../nifi/processors/standard/ControlRate.java   |  683 ++---
 .../standard/ConvertCharacterSet.java           |    3 +
 .../processors/standard/ConvertJSONToSQL.java   |    3 +
 .../processors/standard/DetectDuplicate.java    |    3 +
 .../processors/standard/DistributeLoad.java     |    3 +
 .../processors/standard/DuplicateFlowFile.java  |    3 +
 .../nifi/processors/standard/EncodeContent.java |   15 +-
 .../processors/standard/EncryptContent.java     |    3 +
 .../processors/standard/EvaluateJsonPath.java   |   38 +-
 .../nifi/processors/standard/EvaluateXPath.java |   29 +-
 .../processors/standard/EvaluateXQuery.java     |   25 +-
 .../processors/standard/ExecuteProcess.java     |    3 +
 .../nifi/processors/standard/ExecuteSQL.java    |    3 +
 .../standard/ExecuteStreamCommand.java          |    7 +-
 .../nifi/processors/standard/ExtractText.java   |    3 +
 .../processors/standard/GenerateFlowFile.java   |   11 +-
 .../apache/nifi/processors/standard/GetFTP.java |   13 +-
 .../nifi/processors/standard/GetFile.java       |    7 +-
 .../nifi/processors/standard/GetHTTP.java       |    3 +
 .../nifi/processors/standard/GetJMSQueue.java   |    3 +
 .../nifi/processors/standard/GetJMSTopic.java   |    3 +
 .../nifi/processors/standard/GetSFTP.java       |    7 +-
 .../processors/standard/HandleHttpRequest.java  |    7 +-
 .../processors/standard/HandleHttpResponse.java |    5 +-
 .../nifi/processors/standard/HashAttribute.java |    5 +-
 .../nifi/processors/standard/HashContent.java   |    5 +-
 .../processors/standard/IdentifyMimeType.java   |    5 +-
 .../nifi/processors/standard/InvokeHTTP.java    |    3 +
 .../nifi/processors/standard/ListenHTTP.java    |   16 +-
 .../nifi/processors/standard/ListenUDP.java     |   18 +-
 .../nifi/processors/standard/LogAttribute.java  |   16 +-
 .../nifi/processors/standard/MergeContent.java  |   11 +-
 .../nifi/processors/standard/ModifyBytes.java   |   14 +-
 .../processors/standard/MonitorActivity.java    |   31 +-
 .../nifi/processors/standard/PostHTTP.java      |    3 +
 .../nifi/processors/standard/PutEmail.java      |    3 +
 .../apache/nifi/processors/standard/PutFTP.java |    3 +
 .../nifi/processors/standard/PutFile.java       |    3 +
 .../apache/nifi/processors/standard/PutJMS.java |    5 +-
 .../nifi/processors/standard/PutSFTP.java       |    3 +
 .../apache/nifi/processors/standard/PutSQL.java |    3 +
 .../nifi/processors/standard/ReplaceText.java   |   54 +-
 .../standard/ReplaceTextWithMapping.java        |   18 +-
 .../processors/standard/RouteOnAttribute.java   |    3 +
 .../processors/standard/RouteOnContent.java     |   19 +-
 .../nifi/processors/standard/ScanAttribute.java |   19 +-
 .../nifi/processors/standard/ScanContent.java   |    5 +-
 .../processors/standard/SegmentContent.java     |    7 +-
 .../nifi/processors/standard/SplitContent.java  |    7 +-
 .../nifi/processors/standard/SplitJson.java     |   32 +-
 .../nifi/processors/standard/SplitText.java     |   53 +-
 .../nifi/processors/standard/SplitXml.java      |   18 +-
 .../nifi/processors/standard/TransformXml.java  |    3 +
 .../nifi/processors/standard/UnpackContent.java |    9 +-
 .../nifi/processors/standard/ValidateXml.java   |   16 +-
 .../processors/attributes/UpdateAttribute.java  |    9 +-
 93 files changed, 2418 insertions(+), 2027 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
----------------------------------------------------------------------
diff --git a/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
new file mode 100644
index 0000000..97e6b88
--- /dev/null
+++ b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
@@ -0,0 +1,51 @@
+package org.apache.nifi.annotation.behavior;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * <p>
+ * Marker annotation that a Processor can use to indicate whether it accepts, requires, or forbids
+ * input from other Processors. This information is used by the framework in order to ensure that
+ * a Processor is marked as invalid if it is missing necessary input or has input that will be ignored.
+ * This information is also used by the NiFi UI in order to prevent users from creating connections
+ * to Processors when those connections do not make sense.
+ * </p>
+ */
+@Documented
+@Target({ElementType.TYPE})
+@Retention(RetentionPolicy.RUNTIME)
+@Inherited
+public @interface InputRequirement {
+	Requirement value();
+	
+	public static enum Requirement {
+		/**
+		 * This value is used to indicate that the Processor requires input from other Processors
+		 * in order to run. As a result, the Processor will not be valid if it does not have any
+		 * incoming connections.
+		 */
+		INPUT_REQUIRED,
+		
+		/**
+		 * This value is used to indicate that the Processor will consume data from an incoming
+		 * connection but does not require an incoming connection in order to perform its task.
+		 * If the {@link InputRequirement} annotation is not present, this is the default value
+		 * that is used.
+		 */
+		INPUT_ALLOWED,
+		
+		/**
+		 * This value is used to indicate that the Processor is a "Source Processor" and does
+		 * not accept incoming connections. Because such a Processor never pulls FlowFiles from
+		 * an incoming connection, allowing users to create one would only cause confusion. This
+		 * value makes clear that incoming connections will not be used and prevents the user
+		 * from creating such a connection in the first place.
+		 */
+		INPUT_FORBIDDEN;
+	}
+}
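
For illustration, here is a minimal sketch of a Source Processor that opts out of
incoming connections using the annotation above. The class name, relationship, and
logic are hypothetical, not part of this commit:

import java.util.Collections;
import java.util.Set;

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;

// Hypothetical source processor: INPUT_FORBIDDEN tells the framework and UI
// that no incoming connection may be drawn to this processor.
@InputRequirement(Requirement.INPUT_FORBIDDEN)
public class ExampleSource extends AbstractProcessor {

    static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Newly generated FlowFiles are routed here")
            .build();

    @Override
    public Set<Relationship> getRelationships() {
        return Collections.singleton(REL_SUCCESS);
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        // A source processor never calls session.get(); it creates FlowFiles instead.
        final FlowFile flowFile = session.create();
        session.transfer(flowFile, REL_SUCCESS);
    }
}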

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-docs/src/main/asciidoc/developer-guide.adoc
----------------------------------------------------------------------
diff --git a/nifi-docs/src/main/asciidoc/developer-guide.adoc b/nifi-docs/src/main/asciidoc/developer-guide.adoc
index f9950d5..28df5c2 100644
--- a/nifi-docs/src/main/asciidoc/developer-guide.adoc
+++ b/nifi-docs/src/main/asciidoc/developer-guide.adoc
@@ -1633,6 +1633,17 @@ will handle your Processor:
 		not there is any data on an input queue. This is useful, for example, if the Processor needs to be triggered to run
 		periodically to time out a network connection.
 
+    - `InputRequirement`: By default, all Processors allow users to create incoming connections for the Processor, but
+        if the user does not create an incoming connection, the Processor is still valid and can be scheduled to run. For Processors
+        that are expected to be used as a "Source Processor," though, this can be confusing: the user may attempt to
+        send FlowFiles to that Processor, only for the FlowFiles to queue up without being processed. Conversely, if the Processor
+        expects incoming FlowFiles but does not have an input queue, the Processor will be scheduled to run but will perform no work,
+        as it will receive no FlowFiles, which is equally confusing. As a result, we can use the `@InputRequirement` annotation
+        and provide it a value of `INPUT_REQUIRED`, `INPUT_ALLOWED`, or `INPUT_FORBIDDEN`. This tells the framework
+        when the Processor should be marked invalid and whether the user should even be able to draw a Connection to the
+        Processor. For instance, if a Processor is annotated with `@InputRequirement(Requirement.INPUT_FORBIDDEN)`, the user will
+        not even be able to create a Connection with that Processor as the destination.
+
 
 === Data Buffering
 

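The INPUT_REQUIRED case described in the developer-guide addition above pairs
naturally with the usual onTrigger null check. A minimal sketch, with hypothetical
names (not from the NiFi codebase):

import java.util.Collections;
import java.util.Set;

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;

// Hypothetical transform processor: the framework marks it invalid until at
// least one incoming connection exists.
@InputRequirement(Requirement.INPUT_REQUIRED)
public class ExampleTransform extends AbstractProcessor {

    static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Processed FlowFiles are routed here")
            .build();

    @Override
    public Set<Relationship> getRelationships() {
        return Collections.singleton(REL_SUCCESS);
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        // Even with INPUT_REQUIRED, the input queue may be empty when triggered.
        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }
        // ... read or rewrite the FlowFile's content here ...
        session.transfer(flowFile, REL_SUCCESS);
    }
}
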
http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
index 8832a73..b214427 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
@@ -29,6 +29,8 @@ import org.apache.avro.file.DataFileStream;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericRecord;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -46,6 +48,7 @@ import org.apache.nifi.processor.io.StreamCallback;
 @SideEffectFree
 @SupportsBatching
 @Tags({ "json", "avro", "binary" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Converts a Binary Avro record into a JSON object. This processor provides a direct mapping of an Avro field to a JSON field, such "
     + "that the resulting JSON will have the same hierarchical structure as the Avro document. Note that the Avro schema information will be lost, as this "
     + "is not a translation from binary Avro to JSON formatted Avro. The output JSON is encoded the UTF-8 encoding. If an incoming FlowFile contains a stream of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
index 48aad7d..4cf5289 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
@@ -16,6 +16,19 @@
  */
 package org.apache.nifi.processors.avro;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.SchemaNormalization;
 import org.apache.avro.file.DataFileStream;
@@ -23,6 +36,8 @@ import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -41,22 +56,10 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-
 @SideEffectFree
 @SupportsBatching
 @Tags({ "avro", "schema", "metadata" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Extracts metadata from the header of an Avro datafile.")
 @WritesAttributes({
         @WritesAttribute(attribute = "schema.type", description = "The type of the schema (i.e. record, enum, etc.)."),

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
index 3b344b5..dbf5778 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
@@ -16,6 +16,18 @@
  */
 package org.apache.nifi.processors.avro;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileConstants;
 import org.apache.avro.file.DataFileStream;
@@ -26,6 +38,8 @@ import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.Encoder;
 import org.apache.avro.io.EncoderFactory;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -45,21 +59,10 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 @SideEffectFree
 @SupportsBatching
 @Tags({ "avro", "split" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a binary encoded Avro datafile into smaller files based on the configured Output Size. The Output Strategy determines if " +
         "the smaller files will be Avro datafiles, or bare Avro records with metadata in the FlowFile attributes. The output will always be binary encoded.")
 public class SplitAvro extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
index 2406b67..131e671 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
@@ -24,6 +24,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -45,6 +47,7 @@ import com.amazonaws.services.s3.model.S3Object;
 
 @SupportsBatching
 @SeeAlso({PutS3Object.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "S3", "AWS", "Get", "Fetch"})
 @CapabilityDescription("Retrieves the contents of an S3 Object and writes it to the content of a FlowFile")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
index 24c82dd..7398c4e 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
@@ -28,6 +28,8 @@ import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -54,6 +56,7 @@ import com.amazonaws.services.s3.model.StorageClass;
 
 @SupportsBatching
 @SeeAlso({FetchS3Object.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "S3", "AWS", "Archive", "Put"})
 @CapabilityDescription("Puts FlowFiles to an Amazon S3 Bucket")
 @DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
@@ -101,7 +104,8 @@ public class PutS3Object extends AbstractS3Processor {
                 .build();
     }
 
-    public void onTrigger(final ProcessContext context, final ProcessSession session) {
+    @Override
+	public void onTrigger(final ProcessContext context, final ProcessSession session) {
         FlowFile flowFile = session.get();
         if (flowFile == null) {
             return;

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
index 7d42703..e571ff4 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
@@ -23,6 +23,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -41,6 +43,7 @@ import com.amazonaws.services.sns.model.PublishRequest;
 
 @SupportsBatching
 @SeeAlso({GetSQS.class, PutSQS.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"amazon", "aws", "sns", "topic", "put", "publish", "pubsub"})
 @CapabilityDescription("Sends the content of a FlowFile as a notification to the Amazon Simple Notification Service")
 public class PutSNS extends AbstractSNSProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
index 65e020d..f88aa71 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
@@ -21,6 +21,8 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -37,6 +39,7 @@ import com.amazonaws.services.sqs.model.DeleteMessageBatchRequestEntry;
 
 @SupportsBatching
 @SeeAlso({GetSQS.class, PutSQS.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Delete"})
 @CapabilityDescription("Deletes a message from an Amazon Simple Queuing Service Queue")
 public class DeleteSQS extends AbstractSQSProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
index 7c2dd2d..a140999 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
@@ -28,6 +28,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -51,8 +53,9 @@ import com.amazonaws.services.sqs.model.ReceiveMessageRequest;
 import com.amazonaws.services.sqs.model.ReceiveMessageResult;
 
 @SupportsBatching
+@SeeAlso({ PutSQS.class, DeleteSQS.class })
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Get", "Fetch", "Poll"})
-@SeeAlso({PutSQS.class, DeleteSQS.class})
 @CapabilityDescription("Fetches messages from an Amazon Simple Queuing Service Queue")
 @WritesAttributes({
     @WritesAttribute(attribute = "hash.value", description = "The MD5 sum of the message"),

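GetSQS is the first processor in this part to take Requirement.INPUT_FORBIDDEN: it is a source, so an incoming connection can never make sense. As a rough illustration of the pattern, and not code from this commit, a forbidden-input processor's onTrigger never calls session.get(); it only creates FlowFiles. All class, tag, and relationship names below are hypothetical.

    package org.apache.nifi.processors.example; // hypothetical package

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.Collections;
    import java.util.Set;

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.annotation.documentation.CapabilityDescription;
    import org.apache.nifi.annotation.documentation.Tags;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.processor.io.OutputStreamCallback;

    @InputRequirement(Requirement.INPUT_FORBIDDEN)
    @Tags({"example", "source"})
    @CapabilityDescription("Hypothetical source processor: creates FlowFiles, accepts no input")
    public class ExampleSource extends AbstractProcessor {

        static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();

        @Override
        public Set<Relationship> getRelationships() {
            return Collections.singleton(REL_SUCCESS);
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) {
            // INPUT_FORBIDDEN: never call session.get(); this processor only creates FlowFiles.
            FlowFile flowFile = session.create();
            flowFile = session.write(flowFile, new OutputStreamCallback() {
                @Override
                public void process(final OutputStream out) throws IOException {
                    out.write("example payload".getBytes(StandardCharsets.UTF_8));
                }
            });
            session.transfer(flowFile, REL_SUCCESS);
        }
    }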
http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
index 3961f32..0af508e 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
@@ -28,6 +28,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -45,8 +47,9 @@ import com.amazonaws.services.sqs.model.SendMessageBatchRequest;
 import com.amazonaws.services.sqs.model.SendMessageBatchRequestEntry;
 
 @SupportsBatching
+@SeeAlso({ GetSQS.class, DeleteSQS.class })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Put", "Publish"})
-@SeeAlso({GetSQS.class, DeleteSQS.class})
 @CapabilityDescription("Publishes a message to an Amazon Simple Queuing Service Queue")
 @DynamicProperty(name = "The name of a Message Attribute to add to the message", value = "The value of the Message Attribute",
         description = "Allows the user to add key/value pairs as Message Attributes by adding a property whose name will become the name of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
index 57e0278..f93b215 100644
--- a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
+++ b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
@@ -16,20 +16,19 @@
  */
 package org.apache.nifi.processors.flume;
 
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.flume.EventDeliveryException;
 import org.apache.flume.Sink;
 import org.apache.flume.conf.Configurables;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
-
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.processor.ProcessContext;
@@ -40,12 +39,17 @@ import org.apache.nifi.processor.SchedulingContext;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 /**
  * This processor runs a Flume sink
  */
+@TriggerSerially
 @Tags({"flume", "hadoop", "put", "sink"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Execute a Flume sink. Each input FlowFile is converted into a Flume Event for processing by the sink.")
-@TriggerSerially
 public class ExecuteFlumeSink extends AbstractFlumeProcessor {
 
     public static final PropertyDescriptor SINK_TYPE = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
index 600f4b1..3aad6b7 100644
--- a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
+++ b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
@@ -16,12 +16,10 @@
  */
 package org.apache.nifi.processors.flume;
 
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.flume.EventDeliveryException;
 import org.apache.flume.EventDrivenSource;
 import org.apache.flume.PollableSource;
@@ -29,12 +27,13 @@ import org.apache.flume.Source;
 import org.apache.flume.channel.ChannelProcessor;
 import org.apache.flume.conf.Configurables;
 import org.apache.flume.source.EventDrivenSourceRunner;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
-
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.processor.ProcessContext;
@@ -46,12 +45,17 @@ import org.apache.nifi.processor.SchedulingContext;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 /**
  * This processor runs a Flume source
  */
+@TriggerSerially
 @Tags({"flume", "hadoop", "get", "source"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Execute a Flume source. Each Flume Event is sent to the success relationship as a FlowFile")
-@TriggerSerially
 public class ExecuteFlumeSource extends AbstractFlumeProcessor {
 
     public static final PropertyDescriptor SOURCE_TYPE = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
index f2a83d0..2f72d0f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
@@ -20,6 +20,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.connectable.Connectable;
 import org.apache.nifi.controller.service.ControllerServiceNode;
 import org.apache.nifi.controller.service.ControllerServiceProvider;
@@ -30,70 +31,72 @@ import org.apache.nifi.scheduling.SchedulingStrategy;
 
 public abstract class ProcessorNode extends AbstractConfiguredComponent implements Connectable {
 
     public ProcessorNode(final Processor processor, final String id,
             final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
         super(processor, id, validationContextFactory, serviceProvider);
     }
 
     public abstract boolean isIsolated();
 
     public abstract boolean isTriggerWhenAnyDestinationAvailable();
 
     @Override
     public abstract boolean isSideEffectFree();
 
     public abstract boolean isTriggeredSerially();
 
     public abstract boolean isEventDrivenSupported();
 
     public abstract boolean isHighThroughputSupported();
 
+    public abstract Requirement getInputRequirement();
+
     @Override
     public abstract boolean isValid();
 
     public abstract void setScheduledState(ScheduledState scheduledState);
 
     public abstract void setBulletinLevel(LogLevel bulletinLevel);
 
     public abstract LogLevel getBulletinLevel();
 
     public abstract Processor getProcessor();
 
     public abstract void yield(long period, TimeUnit timeUnit);
 
     public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
 
     public abstract Set<Relationship> getAutoTerminatedRelationships();
 
     public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
 
     @Override
     public abstract SchedulingStrategy getSchedulingStrategy();
 
     public abstract void setRunDuration(long duration, TimeUnit timeUnit);
 
     public abstract long getRunDuration(TimeUnit timeUnit);
 
     public abstract Map<String, String> getStyle();
 
     public abstract void setStyle(Map<String, String> style);
 
     /**
      * @return the number of threads (concurrent tasks) currently being used by
      * this Processor
      */
     public abstract int getActiveThreadCount();
 
     /**
      * Verifies that this Processor can be started if the provided set of
      * services are enabled. This is introduced because we need to verify that
      * all components can be started before starting any of them. In order to do
      * that, we need to know that this component can be started if the given
      * services are enabled, as we will then enable the given services before
      * starting this component.
      *
      * @param ignoredReferences to ignore
      */
     public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
 
 }

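This ProcessorNode change is the API half of the commit message: getInputRequirement() exposes each processor's declared requirement so the framework can mark a processor invalid when its connections do not agree with the annotation. The helper below is only an illustration of that check, assuming the getIncomingConnections() accessor that ProcessorNode inherits from Connectable; it is not the framework's actual validation code.

    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.controller.ProcessorNode;

    // Illustrative helper, not framework source: flags the two disagreement
    // cases that NIFI-810 is about.
    final class InputRequirementCheck {

        private InputRequirementCheck() {
        }

        static void verify(final ProcessorNode procNode) {
            final Requirement requirement = procNode.getInputRequirement();
            final boolean hasIncoming = !procNode.getIncomingConnections().isEmpty();

            if (requirement == Requirement.INPUT_REQUIRED && !hasIncoming) {
                throw new IllegalStateException("Processor requires an incoming connection but has none");
            }
            if (requirement == Requirement.INPUT_FORBIDDEN && hasIncoming) {
                throw new IllegalStateException("Processor forbids incoming connections but has at least one");
            }
            // INPUT_ALLOWED: valid with or without incoming connections.
        }
    }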

[02/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
index b825972..39dc725 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
@@ -28,11 +28,13 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.distributed.cache.client.Deserializer;
 import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
@@ -52,6 +54,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SupportsBatching
 @Tags({"hash", "dupe", "duplicate", "dedupe"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Caches a value, computed from FlowFile attributes, for each incoming FlowFile and determines if the cached value has already been seen. "
         + "If so, routes the FlowFile to 'duplicate' with an attribute named 'original.identifier' that specifies the original FlowFile's"
         + "\"description\", which is specified in the <FlowFile Description> property. If the FlowFile is not determined to be a duplicate, the Processor "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
index afff3c4..73ada84 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
@@ -32,9 +32,11 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.behavior.DynamicRelationship;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -57,6 +59,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @TriggerWhenAnyDestinationAvailable
 @Tags({"distribute", "load balance", "route", "round robin", "weighted"})
 @CapabilityDescription("Distributes FlowFiles to downstream processors based on a Distribution Strategy. If using the Round Robin "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
index 7400821..021a94f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
@@ -21,7 +21,9 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -36,6 +38,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SupportsBatching
 @Tags({"test", "load", "duplicate"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Intended for load testing, this processor will create the configured number of copies of each incoming FlowFile")
 public class DuplicateFlowFile extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
index 67c2214..de81fe5 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
@@ -26,13 +26,20 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Base32InputStream;
 import org.apache.commons.codec.binary.Base32OutputStream;
-
 import org.apache.commons.codec.binary.Base64InputStream;
 import org.apache.commons.codec.binary.Base64OutputStream;
 import org.apache.commons.codec.binary.Hex;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -41,11 +48,6 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processors.standard.util.ValidatingBase32InputStream;
 import org.apache.nifi.processors.standard.util.ValidatingBase64InputStream;
@@ -55,6 +57,7 @@ import org.apache.nifi.util.StopWatch;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"encode", "decode", "base64", "hex"})
 @CapabilityDescription("Encodes the FlowFile content in base64")
 public class EncodeContent extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
index 6492d0a..7b98189 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
@@ -27,6 +27,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -55,6 +57,7 @@ import org.bouncycastle.jce.provider.BouncyCastleProvider;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"encryption", "decryption", "password", "JCE", "OpenPGP", "PGP", "GPG"})
 @CapabilityDescription("Encrypts or Decrypts a FlowFile using either symmetric encryption with a password and randomly generated salt, or asymmetric encryption using a public and secret key.")
 public class EncryptContent extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
index ad3120c..db60f13 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
@@ -16,13 +16,25 @@
  */
 package org.apache.nifi.processors.standard;
 
-import com.jayway.jsonpath.DocumentContext;
-import com.jayway.jsonpath.InvalidJsonException;
-import com.jayway.jsonpath.JsonPath;
-import com.jayway.jsonpath.PathNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -42,24 +54,16 @@ import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
+import com.jayway.jsonpath.DocumentContext;
+import com.jayway.jsonpath.InvalidJsonException;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.PathNotFoundException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"JSON", "evaluate", "JsonPath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Evaluates one or more JsonPath expressions against the content of a FlowFile. "
         + "The results of those expressions are assigned to FlowFile Attributes or are written to the content of the FlowFile itself, "
         + "depending on configuration of the Processor. "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
index 80b1795..6b3c514 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
@@ -49,40 +49,43 @@ import javax.xml.xpath.XPathExpressionException;
 import javax.xml.xpath.XPathFactory;
 import javax.xml.xpath.XPathFactoryConfigurationException;
 
-import net.sf.saxon.lib.NamespaceConstant;
-import net.sf.saxon.xpath.XPathEvaluator;
-
+import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.BufferedInputStream;
-import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
+import org.apache.nifi.stream.io.BufferedInputStream;
+import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 import org.xml.sax.InputSource;
 
+import net.sf.saxon.lib.NamespaceConstant;
+import net.sf.saxon.xpath.XPathEvaluator;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"XML", "evaluate", "XPath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Evaluates one or more XPaths against the content of a FlowFile. The results of those XPaths are assigned to "
         + "FlowFile Attributes or are written to the content of the FlowFile itself, depending on configuration of the "
         + "Processor. XPaths are entered by adding user-defined properties; the name of the property maps to the Attribute "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
index 3291b55..f8db8f8 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
@@ -40,23 +40,15 @@ import javax.xml.transform.TransformerFactoryConfigurationError;
 import javax.xml.transform.sax.SAXSource;
 import javax.xml.transform.stream.StreamResult;
 
-import net.sf.saxon.s9api.DOMDestination;
-import net.sf.saxon.s9api.Processor;
-import net.sf.saxon.s9api.SaxonApiException;
-import net.sf.saxon.s9api.XQueryCompiler;
-import net.sf.saxon.s9api.XQueryEvaluator;
-import net.sf.saxon.s9api.XQueryExecutable;
-import net.sf.saxon.s9api.XdmItem;
-import net.sf.saxon.s9api.XdmNode;
-import net.sf.saxon.s9api.XdmValue;
-
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -78,10 +70,21 @@ import org.apache.nifi.util.ObjectHolder;
 import org.w3c.dom.Document;
 import org.xml.sax.InputSource;
 
+import net.sf.saxon.s9api.DOMDestination;
+import net.sf.saxon.s9api.Processor;
+import net.sf.saxon.s9api.SaxonApiException;
+import net.sf.saxon.s9api.XQueryCompiler;
+import net.sf.saxon.s9api.XQueryEvaluator;
+import net.sf.saxon.s9api.XQueryExecutable;
+import net.sf.saxon.s9api.XdmItem;
+import net.sf.saxon.s9api.XdmNode;
+import net.sf.saxon.s9api.XdmValue;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"XML", "evaluate", "XPath", "XQuery"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription(
         "Evaluates one or more XQueries against the content of a FlowFile.  The results of those XQueries are assigned "
         + "to FlowFile Attributes or are written to the content of the FlowFile itself, depending on configuration of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
index c8a67a0..fd6bb05 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
@@ -43,6 +43,8 @@ import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -60,6 +62,7 @@ import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.ArgumentUtils;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"command", "process", "source", "external", "invoke", "script"})
 @CapabilityDescription("Runs an operating system command specified by the user and writes the output of that command to a FlowFile. If the command is expected "
         + "to be long-running, the Processor can output the partial data on a specified interval. When this option is used, the output is expected to be in textual "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
index 45fd1a8..5e25bdd 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
@@ -30,6 +30,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -48,6 +50,7 @@ import org.apache.nifi.util.LongHolder;
 import org.apache.nifi.util.StopWatch;
 
 @EventDriven
+@InputRequirement(Requirement.INPUT_ALLOWED)
 @Tags({ "sql", "select", "jdbc", "query", "database" })
 @CapabilityDescription("Execute provided SQL select query. Query result will be converted to Avro format."
     + " Streaming is used so arbitrarily large result sets are supported. This processor can be scheduled to run on " +

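ExecuteSQL is the one processor in this part annotated with Requirement.INPUT_ALLOWED: it can run purely on its schedule, or be driven by an incoming FlowFile when a connection exists. The fragment below sketches that dual-mode idiom, assuming the usual REL_SUCCESS relationship declared on the enclosing processor; it is illustrative, not ExecuteSQL's actual onTrigger body.

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        // Dual-mode idiom for INPUT_ALLOWED: session.get() returns null when
        // there is no upstream FlowFile, and the processor can still do work.
        FlowFile incoming = session.get();
        if (incoming == null) {
            // Timer-driven run: no upstream FlowFile, so create the output from scratch.
            FlowFile result = session.create();
            result = session.putAttribute(result, "triggered.by", "schedule");
            session.transfer(result, REL_SUCCESS);
        } else {
            // FlowFile-driven run: the incoming FlowFile carries the work to do.
            incoming = session.putAttribute(incoming, "triggered.by", "upstream");
            session.transfer(incoming, REL_SUCCESS);
        }
    }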
http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
index 633ce61..9bea6ba 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
@@ -35,11 +35,13 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -119,6 +121,7 @@ import org.apache.nifi.stream.io.StreamUtils;
  */
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"command execution", "command", "stream", "execute"})
 @CapabilityDescription("Executes an external command on the contents of a flow file, and creates a new flow file with the results of the command.")
 @DynamicProperty(name = "An environment variable name", value = "An environment variable value", description = "These environment variables are passed to the process spawned by this Processor")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
index 29b9c20..9583b8e 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
@@ -34,6 +34,8 @@ import java.util.regex.Pattern;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -56,6 +58,7 @@ import org.apache.nifi.stream.io.StreamUtils;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"evaluate", "extract", "Text", "Regular Expression", "regex"})
 @CapabilityDescription(
         "Evaluates one or more Regular Expressions against the content of a FlowFile.  "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
index aa1206a..4feee1b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
@@ -26,6 +26,12 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.processor.AbstractProcessor;
@@ -34,15 +40,12 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
 @SupportsBatching
 @Tags({"test", "random", "generate"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("This processor creates FlowFiles of random data and is used for load testing")
 public class GenerateFlowFile extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
index ff5b599..7c78faa 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
@@ -20,17 +20,20 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessorInitializationContext;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
 import org.apache.nifi.processors.standard.util.FileTransfer;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"FTP", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
 @CapabilityDescription("Fetches files from an FTP Server and creates FlowFiles from them")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
index 0fa9178..ced79cd 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
@@ -49,12 +49,14 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
@@ -70,6 +72,7 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"local", "files", "filesystem", "ingest", "ingress", "get", "source", "input"})
 @CapabilityDescription("Creates FlowFiles from files in a directory.  NiFi will ignore files it doesn't have at least read permissions for.")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
index 7099552..48ca2de 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
@@ -69,6 +69,8 @@ import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -94,6 +96,7 @@ import org.apache.nifi.ssl.SSLContextService.ClientAuth;
 import org.apache.nifi.util.StopWatch;
 
 @Tags({"get", "fetch", "poll", "http", "https", "ingest", "source", "input"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Fetches a file via HTTP")
 @WritesAttributes({
     @WritesAttribute(attribute = "filename", description = "The filename is set to the name of the file on the remote server"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
index 6be505a..0ba7f98 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
@@ -21,6 +21,8 @@ import java.util.concurrent.LinkedBlockingQueue;
 
 import javax.jms.JMSException;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -34,6 +36,7 @@ import org.apache.nifi.processors.standard.util.JmsFactory;
 import org.apache.nifi.processors.standard.util.WrappedMessageConsumer;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"jms", "queue", "listen", "get", "pull", "source", "consume", "consumer"})
 @CapabilityDescription("Pulls messages from a JMS Queue, creating a FlowFile for each JMS Message or bundle of messages, as configured")
 @SeeAlso(PutJMS.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
index e7209cc..272c7ab 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
@@ -41,6 +41,8 @@ import javax.jms.InvalidDestinationException;
 import javax.jms.JMSException;
 import javax.jms.Session;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -60,6 +62,7 @@ import org.apache.nifi.processors.standard.util.WrappedMessageConsumer;
 
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"jms", "topic", "subscription", "durable", "non-durable", "listen", "get", "pull", "source", "consume", "consumer"})
 @CapabilityDescription("Pulls messages from a JMS Topic, creating a FlowFile for each JMS Message or bundle of messages, as configured")
 @SeeAlso(PutJMS.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
index 7841bec..63256f3 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
@@ -21,11 +21,13 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -34,6 +36,7 @@ import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.FileTransfer;
 import org.apache.nifi.processors.standard.util.SFTPTransfer;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"sftp", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
 @CapabilityDescription("Fetches files from an SFTP Server and creates FlowFiles from them")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
index 2583e88..49bad40 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
@@ -44,11 +44,13 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.AllowableValue;
@@ -75,6 +77,7 @@ import org.eclipse.jetty.util.ssl.SslContextFactory;
 
 import com.sun.jersey.api.client.ClientResponse.Status;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"http", "https", "request", "listen", "ingress", "web service"})
 @CapabilityDescription("Starts an HTTP Server and listens for HTTP Requests. For each request, creates a FlowFile and transfers to 'success'. "
         + "This Processor is designed to be used in conjunction with the HandleHttpResponse Processor in order to create a Web Service")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
index 6de3fe6..a4317dc 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
@@ -27,8 +27,10 @@ import java.util.regex.Pattern;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -41,6 +43,7 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"http", "https", "response", "egress", "web service"})
 @CapabilityDescription("Sends an HTTP Response to the Requestor that generated a FlowFile. This Processor is designed to be used in conjunction with "
         + "the HandleHttpRequest in order to create a web service.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
index b3dbf83..a0c603c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
@@ -33,11 +33,13 @@ import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -96,6 +98,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @SideEffectFree
 @SupportsBatching
 @Tags({"attributes", "hash"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Hashes together the key/value pairs of several FlowFile Attributes and adds the hash as a new attribute. "
         + "Optional properties are to be added such that the name of the property is the name of a FlowFile Attribute to consider "
         + "and the value of the property is a regular expression that, if matched by the attribute value, will cause that attribute "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
index 526754e..9885599 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
@@ -29,10 +29,12 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -50,6 +52,7 @@ import org.apache.nifi.util.ObjectHolder;
 
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hash", "content", "MD5", "SHA-1", "SHA-256"})
 @CapabilityDescription("Calculates a hash value for the Content of a FlowFile and puts that hash value on the FlowFile as an attribute whose name "
         + "is determined by the <Hash Attribute Name> property")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
index 5f16ff3..d09117d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
@@ -24,11 +24,13 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.logging.ProcessorLog;
@@ -65,6 +67,7 @@ import org.apache.tika.mime.MimeTypeException;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"compression", "gzip", "bzip2", "zip", "MIME", "mime.type", "file", "identify"})
 @CapabilityDescription("Attempts to identify the MIME Type used for a FlowFile. If the MIME Type can be identified, "
         + "an attribute with the name 'mime.type' is added with the value being the MIME Type. If the MIME Type cannot be determined, "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
index f16eb9c..a06b3d6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
@@ -54,6 +54,8 @@ import javax.net.ssl.SSLSession;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -77,6 +79,7 @@ import org.joda.time.format.DateTimeFormatter;
 
 @SupportsBatching
 @Tags({"http", "https", "rest", "client"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("An HTTP client processor which converts FlowFile attributes to HTTP headers, with configurable HTTP method, url, etc.")
 @WritesAttributes({
     @WritesAttribute(attribute = "invokehttp.status.code", description = "The status code that is returned"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
index c7842d9..258e122 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
@@ -31,10 +31,14 @@ import java.util.regex.Pattern;
 import javax.servlet.Servlet;
 import javax.ws.rs.Path;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
-import org.apache.nifi.stream.io.StreamThrottler;
 import org.apache.nifi.processor.AbstractSessionFactoryProcessor;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
@@ -42,15 +46,12 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.lifecycle.OnStopped;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.servlets.ContentAcknowledgmentServlet;
 import org.apache.nifi.processors.standard.servlets.ListenHTTPServlet;
 import org.apache.nifi.ssl.SSLContextService;
-
+import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
+import org.apache.nifi.stream.io.StreamThrottler;
 import org.eclipse.jetty.server.Connector;
 import org.eclipse.jetty.server.HttpConfiguration;
 import org.eclipse.jetty.server.HttpConnectionFactory;
@@ -62,6 +63,7 @@ import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.util.thread.QueuedThreadPool;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"ingest", "http", "https", "rest", "listen"})
 @CapabilityDescription("Starts an HTTP Server that is used to receive FlowFiles from remote sources. The URL of the Service will be http://{hostname}:{port}/contentListener")
 public class ListenHTTP extends AbstractSessionFactoryProcessor {
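
Given the endpoint pattern in the description, a small hypothetical client shows what a remote source would do; host, port, and payload are assumptions.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    // Hypothetical client for a running ListenHTTP instance. The POST body
    // becomes the content of a new FlowFile on the NiFi side.
    public class ContentListenerClientSketch {
        public static void main(final String[] args) throws Exception {
            final URL url = new URL("http://localhost:8011/contentListener"); // assumes a Listening Port of 8011
            final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setDoOutput(true);
            try (OutputStream out = conn.getOutputStream()) {
                out.write("hello nifi".getBytes(StandardCharsets.UTF_8));
            }
            System.out.println("HTTP status: " + conn.getResponseCode()); // 200 indicates the content was received
        }
    }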

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
index 6a88bd4..b620dd3 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
@@ -41,6 +41,15 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.annotation.lifecycle.OnStopped;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -58,19 +67,11 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.lifecycle.OnStopped;
-import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.UDPStreamConsumer;
 import org.apache.nifi.util.Tuple;
 
-import org.apache.commons.lang3.StringUtils;
-
 /**
  * <p>
  * This processor listens for Datagram Packets on a given port and concatenates the contents of those packets together generating flow files roughly as often as the internal buffer fills up or until
@@ -113,6 +114,7 @@ import org.apache.commons.lang3.StringUtils;
  */
 @TriggerWhenEmpty
 @Tags({"ingest", "udp", "listen", "source"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Listens for Datagram Packets on a given port and concatenates the contents of those packets "
         + "together generating flow files")
 public class ListenUDP extends AbstractSessionFactoryProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
index 6d0b643..5cd5b14 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
@@ -27,6 +27,14 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -35,22 +43,16 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
 import org.eclipse.jetty.util.StringUtil;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"attributes", "logging"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 public class LogAttribute extends AbstractProcessor {
 
     public static final PropertyDescriptor LOG_LEVEL = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
index e9258df..2cad11e 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
@@ -48,15 +48,17 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
+import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
-import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;
@@ -86,6 +88,7 @@ import org.apache.nifi.util.ObjectHolder;
 
 @SideEffectFree
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"merge", "content", "correlation", "tar", "zip", "stream", "concatenation", "archive", "flowfile-stream", "flowfile-stream-v3"})
 @CapabilityDescription("Merges a Group of FlowFiles together based on a user-defined strategy and packages them into a single FlowFile. "
         + "It is recommended that the Processor be configured with only a single incoming connection, as Group of FlowFiles will not be "

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
index be21b32..e0efa3d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
@@ -25,28 +25,32 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.StopWatch;
 
 @EventDriven
 @SideEffectFree
 @Tags({"binary", "discard", "keep"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Keep or discard bytes range from a binary file.")
 public class ModifyBytes extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
index 2900623..426b792 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
@@ -16,6 +16,22 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
@@ -36,23 +52,10 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
 @SideEffectFree
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"monitor", "flow", "active", "inactive", "activity", "detection"})
 @CapabilityDescription("Monitors the flow for activity and sends out an indicator when the flow has not had any data for "
         + "some specified amount of time and again when the flow's activity is restored")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
index 51f28e0..ef84629 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
@@ -82,6 +82,8 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.protocol.HttpContext;
 import org.apache.http.protocol.HttpCoreContext;
 import org.apache.http.util.EntityUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -121,6 +123,7 @@ import org.apache.nifi.util.StopWatch;
 import com.sun.jersey.api.client.ClientResponse.Status;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"http", "https", "remote", "copy", "archive"})
 @CapabilityDescription("Performs an HTTP Post with the content of the FlowFile")
 @ReadsAttribute(attribute = "mime.type", description = "If not sending data as a FlowFile, the mime.type attribute will be used to set the HTTP Header for Content-Type")

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
index 7e2dd31..5605b8d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
@@ -47,6 +47,8 @@ import javax.mail.internet.PreencodedMimeBodyPart;
 import javax.mail.util.ByteArrayDataSource;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -67,6 +69,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 
 @SupportsBatching
 @Tags({"email", "put", "notify", "smtp"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Sends an e-mail to configured recipients for each incoming FlowFile")
 public class PutEmail extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
index b959efa..1679982 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
@@ -28,6 +28,8 @@ import java.util.regex.Pattern;
 
 import org.apache.nifi.annotation.behavior.DynamicProperties;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -41,6 +43,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"remote", "copy", "egress", "put", "ftp", "archive", "files"})
 @CapabilityDescription("Sends FlowFiles to an FTP Server")
 @SeeAlso(GetFTP.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
index 3bbe093..8c4b00f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
@@ -34,6 +34,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -52,6 +54,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"put", "local", "copy", "archive", "files", "filesystem"})
 @CapabilityDescription("Writes the contents of a FlowFile to the local file system")
 @SeeAlso(GetFile.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
index 034a3fc..dff5a6b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
@@ -39,9 +39,9 @@ import static org.apache.nifi.processors.standard.util.JmsProperties.MESSAGE_TTL
 import static org.apache.nifi.processors.standard.util.JmsProperties.MESSAGE_TYPE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_BYTE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_EMPTY;
+import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_MAP;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_STREAM;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_TEXT;
-import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_MAP;
 import static org.apache.nifi.processors.standard.util.JmsProperties.PASSWORD;
 import static org.apache.nifi.processors.standard.util.JmsProperties.REPLY_TO_QUEUE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.TIMEOUT;
@@ -70,6 +70,8 @@ import javax.jms.MessageProducer;
 import javax.jms.Session;
 import javax.jms.StreamMessage;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -89,6 +91,7 @@ import org.apache.nifi.processors.standard.util.WrappedMessageProducer;
 import org.apache.nifi.stream.io.StreamUtils;
 
 @Tags({"jms", "send", "put"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Creates a JMS Message from the contents of a FlowFile and sends the message to a JMS Server")
 @SeeAlso({GetJMSQueue.class, GetJMSTopic.class})
 public class PutJMS extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
index 97fe7e5..48cfc26 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -31,6 +33,7 @@ import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.SFTPTransfer;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"remote", "copy", "egress", "put", "sftp", "archive", "files"})
 @CapabilityDescription("Sends FlowFiles to an SFTP Server")
 @SeeAlso(GetSFTP.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/034ee6de/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
index b087737..0913f86 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
@@ -45,6 +45,8 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
@@ -68,6 +70,7 @@ import org.apache.nifi.stream.io.StreamUtils;
 
 @SupportsBatching
 @SeeAlso(ConvertJSONToSQL.class)
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"sql", "put", "rdbms", "database", "update", "insert", "relational"})
 @CapabilityDescription("Executes a SQL UPDATE or INSERT command. The content of an incoming FlowFile is expected to be the SQL command "
         + "to execute. The SQL command may use the ? to escape parameters. In this case, the parameters to use must exist as FlowFile attributes "

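To make the parameter contract concrete: the SQL statement travels as FlowFile content and each ? placeholder is bound from attributes. A hedged sketch using the mock framework; the sql.args.N.type / sql.args.N.value attribute names follow the processor's parameter convention but should be verified against its documentation, and the DBCP Connection Pool service that PutSQL requires is omitted for brevity.

    import java.nio.charset.StandardCharsets;
    import java.sql.Types;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.nifi.processors.standard.PutSQL;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;

    public class PutSqlUsageSketch {
        public static void main(final String[] args) {
            // A DBCP Connection Pool controller service must also be added and
            // enabled for PutSQL to validate; omitted to keep the sketch short.
            final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);

            final Map<String, String> attrs = new HashMap<>();
            attrs.put("sql.args.1.type", String.valueOf(Types.INTEGER)); // JDBC type code for the first '?'
            attrs.put("sql.args.1.value", "42");                         // value bound to the first '?'

            runner.enqueue("INSERT INTO USERS (ID) VALUES (?)".getBytes(StandardCharsets.UTF_8), attrs);
            runner.run();
        }
    }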

[18/19] nifi git commit: NIFI-810: Merged master into branch

Posted by ma...@apache.org.
NIFI-810: Merged master into branch


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/0636f0e7
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/0636f0e7
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/0636f0e7

Branch: refs/heads/master
Commit: 0636f0e731cd28299edd3a6e9db90de5045ab662
Parents: 8e2308b d63cd6b
Author: Mark Payne <ma...@hotmail.com>
Authored: Sun Oct 25 11:02:40 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Sun Oct 25 11:02:40 2015 -0400

----------------------------------------------------------------------
 .../src/main/asciidoc/administration-guide.adoc |   4 +-
 .../src/main/java/MyProcessor.java              |  11 +-
 .../nifi/processors/avro/ConvertAvroToJSON.java |  67 ++++-
 .../processors/avro/TestConvertAvroToJSON.java  |  47 ++-
 .../processors/aws/AbstractAWSProcessor.java    |   2 +-
 .../nifi/processors/aws/s3/DeleteS3Object.java  |  98 ++++++
 .../org.apache.nifi.processor.Processor         |   1 +
 .../processors/aws/s3/TestDeleteS3Object.java   | 141 +++++++++
 .../nifi/controller/FlowUnmarshaller.java       |  77 -----
 .../src/main/resources/FlowConfiguration.xsd    |   2 +-
 .../src/main/resources/bin/nifi.sh              |  96 +++---
 .../canvas/new-controller-service-dialog.jsp    |   1 -
 .../partials/canvas/new-processor-dialog.jsp    |   1 -
 .../canvas/new-reporting-task-dialog.jsp        |   1 -
 .../css/new-controller-service-dialog.css       |   9 -
 .../main/webapp/css/new-processor-dialog.css    |   9 -
 .../webapp/css/new-reporting-task-dialog.css    |   9 -
 .../webapp/js/nf/canvas/nf-canvas-toolbox.js    |  60 ++--
 .../src/main/webapp/js/nf/canvas/nf-settings.js | 140 +++++----
 .../processors/kite/AbstractKiteProcessor.java  |  11 +-
 .../nifi/processors/kite/ConvertCSVToAvro.java  | 296 ++++++++++---------
 .../processors/kite/TestCSVToAvroProcessor.java |  39 +++
 .../nifi-standard-prioritizers/pom.xml          |   4 +
 .../PriorityAttributePrioritizer.java           |   7 +-
 .../PriorityAttributePrioritizerTest.java       |  17 +-
 .../nifi-standard-processors/pom.xml            |   9 +
 .../nifi/processors/standard/ExecuteSQL.java    |   9 +-
 .../nifi/processors/standard/InvokeHTTP.java    |   1 +
 .../nifi/processors/standard/ListenHTTP.java    | 105 ++++---
 .../standard/PutDistributedMapCache.java        | 252 ++++++++++++++++
 .../servlets/ContentAcknowledgmentServlet.java  |   3 +-
 .../standard/servlets/ListenHTTPServlet.java    |   8 +-
 .../processors/standard/util/JdbcCommon.java    |  70 ++++-
 .../org.apache.nifi.processor.Processor         |   1 +
 .../nifi/processors/standard/TestGetFile.java   |  21 +-
 .../standard/TestPutDistributedMapCache.java    | 277 +++++++++++++++++
 .../standard/util/TestJdbcCommon.java           |  42 +++
 .../standard/util/TestJdbcTypesDerby.java       | 133 +++++++++
 .../standard/util/TestJdbcTypesH2.java          | 149 ++++++++++
 pom.xml                                         |   2 +-
 40 files changed, 1725 insertions(+), 507 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/0636f0e7/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
----------------------------------------------------------------------
diff --cc nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
index b214427,f0ba71a..f0f1630
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
@@@ -35,7 -36,8 +38,7 @@@ import org.apache.nifi.annotation.behav
  import org.apache.nifi.annotation.behavior.SupportsBatching;
  import org.apache.nifi.annotation.behavior.WritesAttribute;
  import org.apache.nifi.annotation.documentation.CapabilityDescription;
--import org.apache.nifi.annotation.documentation.Tags;
+ import org.apache.nifi.components.PropertyDescriptor;
  import org.apache.nifi.flowfile.FlowFile;
  import org.apache.nifi.flowfile.attributes.CoreAttributes;
  import org.apache.nifi.processor.AbstractProcessor;
@@@ -47,8 -50,7 +51,7 @@@ import org.apache.nifi.processor.io.Str
  
  @SideEffectFree
  @SupportsBatching
- @Tags({ "json", "avro", "binary" })
 -@Tags({"json", "avro", "binary"})
 +@InputRequirement(Requirement.INPUT_REQUIRED)
  @CapabilityDescription("Converts a Binary Avro record into a JSON object. This processor provides a direct mapping of an Avro field to a JSON field, such "
      + "that the resulting JSON will have the same hierarchical structure as the Avro document. Note that the Avro schema information will be lost, as this "
      + "is not a translation from binary Avro to JSON formatted Avro. The output JSON is encoded the UTF-8 encoding. If an incoming FlowFile contains a stream of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/0636f0e7/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
----------------------------------------------------------------------
diff --cc nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
index 6f126aa,ea84daa..43b33ff
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
@@@ -30,8 -30,7 +30,9 @@@ import org.apache.avro.Schema
  import org.apache.avro.file.CodecFactory;
  import org.apache.avro.file.DataFileWriter;
  import org.apache.avro.generic.GenericData.Record;
+ import org.apache.commons.lang3.StringEscapeUtils;
 +import org.apache.nifi.annotation.behavior.InputRequirement;
 +import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
  import org.apache.nifi.annotation.documentation.CapabilityDescription;
  import org.apache.nifi.annotation.documentation.Tags;
  import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@@ -68,114 -66,118 +69,108 @@@ public class ConvertCSVToAvro extends A
  
      private static final Validator CHAR_VALIDATOR = new Validator() {
          @Override
--        public ValidationResult validate(String subject, String input,
--                ValidationContext context) {
++        public ValidationResult validate(String subject, String input, ValidationContext context) {
+             // Allows special, escaped characters as input, which is then unescaped and converted to a single character.
+             // Examples for special characters: \t (or \u0009), \f.
+             input = unescapeString(input);
+ 
              return new ValidationResult.Builder()
--                    .subject(subject)
--                    .input(input)
-                     .explanation("Only single characters are supported")
-                     .valid(input.length() == 1)
 -                    .explanation("Only non-null single characters are supported")
 -                    .valid(input.length() == 1 && input.charAt(0) != 0)
--                    .build();
++                .subject(subject)
++                .input(input)
++                .explanation("Only non-null single characters are supported")
++                .valid(input.length() == 1 && input.charAt(0) != 0)
++                .build();
          }
      };
  
      private static final Relationship SUCCESS = new Relationship.Builder()
--            .name("success")
--            .description("Avro content that was converted successfully from CSV")
--            .build();
++        .name("success")
++        .description("Avro content that was converted successfully from CSV")
++        .build();
  
      private static final Relationship FAILURE = new Relationship.Builder()
--            .name("failure")
--            .description("CSV content that could not be processed")
--            .build();
++        .name("failure")
++        .description("CSV content that could not be processed")
++        .build();
  
      private static final Relationship INCOMPATIBLE = new Relationship.Builder()
--            .name("incompatible")
--            .description("CSV content that could not be converted")
--            .build();
++        .name("incompatible")
++        .description("CSV content that could not be converted")
++        .build();
  
      @VisibleForTesting
--    static final PropertyDescriptor SCHEMA
--            = new PropertyDescriptor.Builder()
--            .name("Record schema")
--            .description("Outgoing Avro schema for each record created from a CSV row")
--            .addValidator(SCHEMA_VALIDATOR)
--            .expressionLanguageSupported(true)
--            .required(true)
--            .build();
++    static final PropertyDescriptor SCHEMA = new PropertyDescriptor.Builder()
++        .name("Record schema")
++        .description("Outgoing Avro schema for each record created from a CSV row")
++        .addValidator(SCHEMA_VALIDATOR)
++        .expressionLanguageSupported(true)
++        .required(true)
++        .build();
  
      @VisibleForTesting
--    static final PropertyDescriptor CHARSET
--            = new PropertyDescriptor.Builder()
--            .name("CSV charset")
--            .description("Character set for CSV files")
--            .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
--            .defaultValue(DEFAULTS.charset)
--            .build();
++    static final PropertyDescriptor CHARSET = new PropertyDescriptor.Builder()
++        .name("CSV charset")
++        .description("Character set for CSV files")
++        .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
++        .defaultValue(DEFAULTS.charset)
++        .build();
  
      @VisibleForTesting
--    static final PropertyDescriptor DELIMITER
--            = new PropertyDescriptor.Builder()
--            .name("CSV delimiter")
--            .description("Delimiter character for CSV records")
--            .addValidator(CHAR_VALIDATOR)
--            .defaultValue(DEFAULTS.delimiter)
--            .build();
++    static final PropertyDescriptor DELIMITER = new PropertyDescriptor.Builder()
++        .name("CSV delimiter")
++        .description("Delimiter character for CSV records")
++        .addValidator(CHAR_VALIDATOR)
++        .defaultValue(DEFAULTS.delimiter)
++        .build();
  
      @VisibleForTesting
--    static final PropertyDescriptor QUOTE
--            = new PropertyDescriptor.Builder()
--            .name("CSV quote character")
--            .description("Quote character for CSV values")
--            .addValidator(CHAR_VALIDATOR)
--            .defaultValue(DEFAULTS.quote)
--            .build();
++    static final PropertyDescriptor QUOTE = new PropertyDescriptor.Builder()
++        .name("CSV quote character")
++        .description("Quote character for CSV values")
++        .addValidator(CHAR_VALIDATOR)
++        .defaultValue(DEFAULTS.quote)
++        .build();
  
      @VisibleForTesting
--    static final PropertyDescriptor ESCAPE
--            = new PropertyDescriptor.Builder()
--            .name("CSV escape character")
--            .description("Escape character for CSV values")
--            .addValidator(CHAR_VALIDATOR)
--            .defaultValue(DEFAULTS.escape)
--            .build();
++    static final PropertyDescriptor ESCAPE = new PropertyDescriptor.Builder()
++        .name("CSV escape character")
++        .description("Escape character for CSV values")
++        .addValidator(CHAR_VALIDATOR)
++        .defaultValue(DEFAULTS.escape)
++        .build();
  
      @VisibleForTesting
--    static final PropertyDescriptor HAS_HEADER
--            = new PropertyDescriptor.Builder()
--            .name("Use CSV header line")
--            .description("Whether to use the first line as a header")
--            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
--            .defaultValue(String.valueOf(DEFAULTS.useHeader))
--            .build();
++    static final PropertyDescriptor HAS_HEADER = new PropertyDescriptor.Builder()
++        .name("Use CSV header line")
++        .description("Whether to use the first line as a header")
++        .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
++        .defaultValue(String.valueOf(DEFAULTS.useHeader))
++        .build();
  
      @VisibleForTesting
--    static final PropertyDescriptor LINES_TO_SKIP
--            = new PropertyDescriptor.Builder()
--            .name("Lines to skip")
--            .description("Number of lines to skip before reading header or data")
--            .addValidator(createLongValidator(0L, Integer.MAX_VALUE, true))
--            .defaultValue(String.valueOf(DEFAULTS.linesToSkip))
--            .build();
--
--    private static final List<PropertyDescriptor> PROPERTIES
--            = ImmutableList.<PropertyDescriptor>builder()
--            .addAll(AbstractKiteProcessor.getProperties())
--            .add(SCHEMA)
--            .add(CHARSET)
--            .add(DELIMITER)
--            .add(QUOTE)
--            .add(ESCAPE)
--            .add(HAS_HEADER)
--            .add(LINES_TO_SKIP)
--            .build();
--
--    private static final Set<Relationship> RELATIONSHIPS
--            = ImmutableSet.<Relationship>builder()
--            .add(SUCCESS)
--            .add(FAILURE)
--            .add(INCOMPATIBLE)
--            .build();
++    static final PropertyDescriptor LINES_TO_SKIP = new PropertyDescriptor.Builder()
++        .name("Lines to skip")
++        .description("Number of lines to skip before reading header or data")
++        .addValidator(createLongValidator(0L, Integer.MAX_VALUE, true))
++        .defaultValue(String.valueOf(DEFAULTS.linesToSkip))
++        .build();
++
++    private static final List<PropertyDescriptor> PROPERTIES = ImmutableList.<PropertyDescriptor> builder()
++        .addAll(AbstractKiteProcessor.getProperties())
++        .add(SCHEMA)
++        .add(CHARSET)
++        .add(DELIMITER)
++        .add(QUOTE)
++        .add(ESCAPE)
++        .add(HAS_HEADER)
++        .add(LINES_TO_SKIP)
++        .build();
++
++    private static final Set<Relationship> RELATIONSHIPS = ImmutableSet.<Relationship> builder()
++        .add(SUCCESS)
++        .add(FAILURE)
++        .add(INCOMPATIBLE)
++        .build();
  
      // Immutable configuration
      @VisibleForTesting
@@@ -196,26 -198,26 +191,26 @@@
          super.setDefaultConfiguration(context);
  
          this.props = new CSVProperties.Builder()
--                .charset(context.getProperty(CHARSET).getValue())
--                .delimiter(context.getProperty(DELIMITER).getValue())
--                .quote(context.getProperty(QUOTE).getValue())
--                .escape(context.getProperty(ESCAPE).getValue())
--                .hasHeader(context.getProperty(HAS_HEADER).asBoolean())
--                .linesToSkip(context.getProperty(LINES_TO_SKIP).asInteger())
--                .build();
++            .charset(context.getProperty(CHARSET).getValue())
++            .delimiter(context.getProperty(DELIMITER).getValue())
++            .quote(context.getProperty(QUOTE).getValue())
++            .escape(context.getProperty(ESCAPE).getValue())
++            .hasHeader(context.getProperty(HAS_HEADER).asBoolean())
++            .linesToSkip(context.getProperty(LINES_TO_SKIP).asInteger())
++            .build();
      }
  
      @Override
      public void onTrigger(ProcessContext context, final ProcessSession session)
--            throws ProcessException {
++        throws ProcessException {
          FlowFile incomingCSV = session.get();
          if (incomingCSV == null) {
              return;
          }
  
          String schemaProperty = context.getProperty(SCHEMA)
--                .evaluateAttributeExpressions(incomingCSV)
--                .getValue();
++            .evaluateAttributeExpressions(incomingCSV)
++            .getValue();
          final Schema schema;
          try {
              schema = getSchema(schemaProperty, DefaultConfiguration.get());
@@@ -225,78 -227,85 +220,87 @@@
              return;
          }
  
--        final DataFileWriter<Record> writer = new DataFileWriter<>(
--                AvroUtil.newDatumWriter(schema, Record.class));
--        writer.setCodec(CodecFactory.snappyCodec());
++        try (final DataFileWriter<Record> writer = new DataFileWriter<>(AvroUtil.newDatumWriter(schema, Record.class))) {
++            writer.setCodec(CodecFactory.snappyCodec());
  
--        try {
--            final LongHolder written = new LongHolder(0L);
--            final FailureTracker failures = new FailureTracker();
--
--            FlowFile badRecords = session.clone(incomingCSV);
--            FlowFile outgoingAvro = session.write(incomingCSV, new StreamCallback() {
--                @Override
--                public void process(InputStream in, OutputStream out) throws IOException {
--                    try (CSVFileReader<Record> reader = new CSVFileReader<>(
++            try {
++                final LongHolder written = new LongHolder(0L);
++                final FailureTracker failures = new FailureTracker();
++
++                FlowFile badRecords = session.clone(incomingCSV);
++                FlowFile outgoingAvro = session.write(incomingCSV, new StreamCallback() {
++                    @Override
++                    public void process(InputStream in, OutputStream out) throws IOException {
++                        try (CSVFileReader<Record> reader = new CSVFileReader<>(
                              in, props, schema, Record.class)) {
--                        reader.initialize();
--                        try (DataFileWriter<Record> w = writer.create(schema, out)) {
--                            while (reader.hasNext()) {
--                                try {
--                                    Record record = reader.next();
--                                    w.append(record);
--                                    written.incrementAndGet();
--                                } catch (DatasetRecordException e) {
--                                    failures.add(e);
++                            reader.initialize();
++                            try (DataFileWriter<Record> w = writer.create(schema, out)) {
++                                while (reader.hasNext()) {
++                                    try {
++                                        Record record = reader.next();
++                                        w.append(record);
++                                        written.incrementAndGet();
++                                    } catch (DatasetRecordException e) {
++                                        failures.add(e);
++                                    }
                                  }
                              }
                          }
                      }
--                }
--            });
++                });
  
--            long errors = failures.count();
++                long errors = failures.count();
  
--            session.adjustCounter("Converted records", written.get(),
++                session.adjustCounter("Converted records", written.get(),
                      false /* update only if file transfer is successful */);
--            session.adjustCounter("Conversion errors", errors,
++                session.adjustCounter("Conversion errors", errors,
                      false /* update only if file transfer is successful */);
  
--            if (written.get() > 0L) {
--                session.transfer(outgoingAvro, SUCCESS);
++                if (written.get() > 0L) {
++                    session.transfer(outgoingAvro, SUCCESS);
  
--                if (errors > 0L) {
--                    getLogger().warn("Failed to convert {}/{} records from CSV to Avro",
--                            new Object[] { errors, errors + written.get() });
--                    badRecords = session.putAttribute(
++                    if (errors > 0L) {
++                        getLogger().warn("Failed to convert {}/{} records from CSV to Avro",
++                            new Object[] {errors, errors + written.get()});
++                        badRecords = session.putAttribute(
                              badRecords, "errors", failures.summary());
--                    session.transfer(badRecords, INCOMPATIBLE);
--                } else {
--                    session.remove(badRecords);
--                }
++                        session.transfer(badRecords, INCOMPATIBLE);
++                    } else {
++                        session.remove(badRecords);
++                    }
  
--            } else {
--                session.remove(outgoingAvro);
++                } else {
++                    session.remove(outgoingAvro);
  
--                if (errors > 0L) {
--                    getLogger().warn("Failed to convert {}/{} records from CSV to Avro",
--                            new Object[] { errors, errors });
--                    badRecords = session.putAttribute(
++                    if (errors > 0L) {
++                        getLogger().warn("Failed to convert {}/{} records from CSV to Avro",
++                            new Object[] {errors, errors});
++                        badRecords = session.putAttribute(
                              badRecords, "errors", failures.summary());
--                } else {
--                    badRecords = session.putAttribute(
++                    } else {
++                        badRecords = session.putAttribute(
                              badRecords, "errors", "No incoming records");
++                    }
++
++                    session.transfer(badRecords, FAILURE);
                  }
  
--                session.transfer(badRecords, FAILURE);
++            } catch (ProcessException | DatasetIOException e) {
++                getLogger().error("Failed reading or writing", e);
++                session.transfer(incomingCSV, FAILURE);
++            } catch (DatasetException e) {
++                getLogger().error("Failed to read FlowFile", e);
++                session.transfer(incomingCSV, FAILURE);
              }
 -
 -        } catch (ProcessException | DatasetIOException e) {
 -            getLogger().error("Failed reading or writing", e);
 -            session.transfer(incomingCSV, FAILURE);
 -        } catch (DatasetException e) {
 -            getLogger().error("Failed to read FlowFile", e);
 -            session.transfer(incomingCSV, FAILURE);
++        } catch (final IOException ioe) {
++            throw new RuntimeException("Unable to close Avro Writer", ioe);
+         }
+     }
  
-         } catch (ProcessException | DatasetIOException e) {
-             getLogger().error("Failed reading or writing", e);
-             session.transfer(incomingCSV, FAILURE);
-         } catch (DatasetException e) {
-             getLogger().error("Failed to read FlowFile", e);
-             session.transfer(incomingCSV, FAILURE);
+     private static String unescapeString(String input) {
+         if (input.length() > 1) {
+             input = StringEscapeUtils.unescapeJava(input);
          }
+         return input;
      }
  }
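
The ConvertCSVToAvro hunk above is not just re-indentation: the Avro DataFileWriter now lives in a try-with-resources block, so it is closed on every exit path, and a failure to close surfaces as a RuntimeException instead of being lost. A condensed sketch of that writer lifecycle, using plain Avro against an in-memory stream rather than the NiFi session (the schema and record are illustrative; the snappy codec assumes snappy-java on the classpath, as the processor does):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;
    import org.apache.avro.file.CodecFactory;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericData.Record;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecordBuilder;

    public class WriterLifecycleSketch {
        public static void main(String[] args) throws IOException {
            Schema schema = SchemaBuilder.record("Row").fields()
                .requiredString("value")
                .endRecord();

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            // try-with-resources guarantees close() runs even if appending
            // fails, which is what the refactored processor relies on.
            try (DataFileWriter<Record> writer =
                     new DataFileWriter<Record>(new GenericDatumWriter<Record>(schema))
                         .setCodec(CodecFactory.snappyCodec())
                         .create(schema, out)) {
                writer.append(new GenericRecordBuilder(schema).set("value", "a").build());
            }
            System.out.println("wrote " + out.size() + " bytes of Avro");
        }
    }

The new unescapeString helper at the end of the class leans on StringEscapeUtils.unescapeJava, which turns a typed two-character sequence such as "\t" into the real tab character, presumably so users can enter escape sequences for the delimiter, quote, and escape properties.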

http://git-wip-us.apache.org/repos/asf/nifi/blob/0636f0e7/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/nifi/blob/0636f0e7/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/nifi/blob/0636f0e7/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
----------------------------------------------------------------------
diff --cc nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
index 258e122,9ad1703..88b6666
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
@@@ -63,9 -61,8 +63,9 @@@ import org.eclipse.jetty.servlet.Servle
  import org.eclipse.jetty.util.ssl.SslContextFactory;
  import org.eclipse.jetty.util.thread.QueuedThreadPool;
  
 +@InputRequirement(Requirement.INPUT_FORBIDDEN)
  @Tags({"ingest", "http", "https", "rest", "listen"})
- @CapabilityDescription("Starts an HTTP Server that is used to receive FlowFiles from remote sources. The URL of the Service will be http://{hostname}:{port}/contentListener")
+ @CapabilityDescription("Starts an HTTP Server that is used to receive FlowFiles from remote sources. The default URI of the Service will be http://{hostname}:{port}/contentListener")
  public class ListenHTTP extends AbstractSessionFactoryProcessor {
  
      private Set<Relationship> relationships;
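
ListenHTTP is a pure source, so it picks up INPUT_FORBIDDEN: once annotated, an incoming connection can be reported as a validation error rather than leaving FlowFiles to queue up unread. A minimal sketch of declaring the requirement on a source (the processor and relationship below are illustrative, not part of this commit):

    import java.util.Collections;
    import java.util.Set;

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.processor.exception.ProcessException;

    // Hypothetical source: it only creates FlowFiles, so upstream
    // connections are declared forbidden up front.
    @InputRequirement(Requirement.INPUT_FORBIDDEN)
    public class ExampleSource extends AbstractProcessor {

        static final Relationship SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Newly created FlowFiles")
            .build();

        @Override
        public Set<Relationship> getRelationships() {
            return Collections.singleton(SUCCESS);
        }

        @Override
        public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
            // A source never calls session.get(); it creates its own FlowFiles.
            FlowFile flowFile = session.create();
            session.transfer(flowFile, SUCCESS);
        }
    }

ListenHTTP itself extends AbstractSessionFactoryProcessor, but the annotation is declared the same way.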


[11/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
index cbcc54d..385ac73 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
@@ -23,7 +23,8 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -57,6 +58,7 @@ import org.apache.nifi.processors.hadoop.util.SequenceFileWriter;
  *
  */
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "sequence file", "create", "sequencefile"})
 @CapabilityDescription("Creates Hadoop Sequence Files from incoming flow files")
 @SeeAlso(PutHDFS.class)
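
CreateHadoopSequenceFile consumes incoming flow files, so it is tagged INPUT_REQUIRED. Per the commit subject, the point of the annotation is that a processor becomes invalid when its connections do not agree with the declared requirement. Assuming the annotation is retained at runtime (the framework needs it for exactly this), the check presumably reduces to something like the sketch below; the class and method names are illustrative, not NiFi's actual validation code:

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

    // Hypothetical helper: map the declared requirement plus the number of
    // incoming connections to a validation error message, or null if valid.
    final class InputRequirementCheck {

        static String validate(final Class<?> processorClass, final int incomingConnections) {
            final InputRequirement annotation = processorClass.getAnnotation(InputRequirement.class);
            // Assumed default when a processor is not annotated.
            final Requirement requirement = (annotation == null) ? Requirement.INPUT_ALLOWED : annotation.value();

            switch (requirement) {
                case INPUT_REQUIRED:
                    return incomingConnections > 0 ? null
                        : "Processor requires an incoming connection but has none";
                case INPUT_FORBIDDEN:
                    return incomingConnections == 0 ? null
                        : "Processor does not allow incoming connections but has " + incomingConnections;
                default: // INPUT_ALLOWED: any configuration agrees
                    return null;
            }
        }

        private InputRequirementCheck() {
        }
    }

GetHDFS below is the interesting middle case: it is annotated INPUT_ALLOWED, so it stays valid with or without an upstream connection.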

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
index 4a52fb7..aa03e73 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -44,6 +46,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "hdfs", "get", "ingest", "fetch", "source"})
 @CapabilityDescription("Retrieves a file from HDFS. The content of the incoming FlowFile is replaced by the content of the file in HDFS. "
         + "The file in HDFS is left intact without any changes being made to it.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
index de776d4..4c9deea 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
@@ -41,6 +41,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -62,6 +64,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_ALLOWED)
 @Tags({"hadoop", "HDFS", "get", "fetch", "ingest", "source", "filesystem"})
 @CapabilityDescription("Fetch files from Hadoop Distributed File System (HDFS) into FlowFiles. This Processor will delete the file from HDFS after fetching it.")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
index 151cbf2..563bda8 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
@@ -36,6 +36,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -66,6 +68,7 @@ import org.codehaus.jackson.map.ObjectMapper;
 
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"hadoop", "HDFS", "get", "list", "ingest", "source", "filesystem"})
 @CapabilityDescription("Retrieves a listing of files from HDFS. For each file that is listed in HDFS, creates a FlowFile that represents "
         + "the HDFS file so that it can be fetched in conjunction with ListHDFS. This Processor is designed to run on Primary Node only "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
index 901159b..bedf1b9 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
@@ -33,6 +33,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -59,6 +61,7 @@ import org.apache.nifi.util.StopWatch;
 /**
  * This processor copies FlowFiles to HDFS.
  */
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "HDFS", "put", "copy", "filesystem"})
 @CapabilityDescription("Write FlowFile data to Hadoop Distributed File System (HDFS)")
 @WritesAttribute(attribute = "filename", description = "The name of the file written to HDFS comes from the value of this attribute.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
index 574fb2d..3a6ac79 100644
--- a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
+++ b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
@@ -26,6 +26,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -57,6 +59,7 @@ import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
 
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"HL7", "health level 7", "healthcare", "extract", "attributes"})
 @CapabilityDescription("Extracts information from an HL7 (Health Level 7) formatted FlowFile and adds the information as FlowFile Attributes. "
         + "The attributes are named as <Segment Name> <dot> <Field Index>. If the segment is repeating, the naming will be "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
index 53e7e69..26e8bb6 100644
--- a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
+++ b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
@@ -29,6 +29,8 @@ import java.util.Set;
 import org.apache.nifi.annotation.behavior.DynamicProperties;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -63,6 +65,7 @@ import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"HL7", "healthcare", "route", "Health Level 7"})
 @DynamicProperties({
     @DynamicProperty(name = "Name of a Relationship", value = "An HL7 Query Language query",

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
index 7fe6195..b44eccd 100644
--- a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
+++ b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
@@ -16,11 +16,18 @@
  */
 package org.apache.nifi.processors.image;
 
-import com.drew.imaging.ImageMetadataReader;
-import com.drew.imaging.ImageProcessingException;
-import com.drew.metadata.Directory;
-import com.drew.metadata.Metadata;
-import com.drew.metadata.Tag;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -30,25 +37,22 @@ import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.HashMap;
+import com.drew.imaging.ImageMetadataReader;
+import com.drew.imaging.ImageProcessingException;
+import com.drew.metadata.Directory;
+import com.drew.metadata.Metadata;
+import com.drew.metadata.Tag;
 
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Exif", "Exchangeable", "image", "file", "format", "JPG", "GIF", "PNG", "BMP", "metadata","IPTC", "XMP"})
 @CapabilityDescription("Extract the image metadata from flowfiles containing images. This processor relies on this "
         + "metadata extractor library https://github.com/drewnoakes/metadata-extractor. It extracts a long list of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
index c085b5f..176561f 100644
--- a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
+++ b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
@@ -17,7 +17,27 @@
 
 package org.apache.nifi.processors.image;
 
+import java.awt.Graphics2D;
+import java.awt.Image;
+import java.awt.image.BufferedImage;
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import javax.imageio.ImageIO;
+import javax.imageio.ImageReader;
+import javax.imageio.stream.ImageInputStream;
+
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -33,25 +53,9 @@ import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
-import javax.imageio.ImageIO;
-import javax.imageio.ImageReader;
-import javax.imageio.stream.ImageInputStream;
-import java.awt.Image;
-import java.awt.Graphics2D;
-import java.awt.image.BufferedImage;
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Iterator;
-import java.util.concurrent.TimeUnit;
-
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({ "resize", "image", "jpg", "jpeg", "png", "bmp", "wbmp", "gif" })
 @CapabilityDescription("Resizes an image to user-specified dimensions. This Processor uses the image codecs registered with the "
     + "environment that NiFi is running in. By default, this includes JPEG, PNG, BMP, WBMP, and GIF images.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
index 26590df..e10977b 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
@@ -32,18 +32,13 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
-import kafka.consumer.Consumer;
-import kafka.consumer.ConsumerConfig;
-import kafka.consumer.ConsumerIterator;
-import kafka.consumer.KafkaStream;
-import kafka.javaapi.consumer.ConsumerConnector;
-import kafka.message.MessageAndMetadata;
-
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
@@ -58,7 +53,15 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import kafka.consumer.Consumer;
+import kafka.consumer.ConsumerConfig;
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+import kafka.javaapi.consumer.ConsumerConnector;
+import kafka.message.MessageAndMetadata;
+
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Fetches messages from Apache Kafka")
 @Tags({"Kafka", "Apache", "Get", "Ingest", "Ingress", "Topic", "PubSub"})
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
index d83c7bf..cff285c 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
@@ -30,10 +30,8 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-import kafka.producer.ProducerConfig;
-
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -58,9 +56,13 @@ import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.stream.io.util.NonThreadSafeCircularBuffer;
 import org.apache.nifi.util.LongHolder;
 
+import kafka.javaapi.producer.Producer;
+import kafka.producer.KeyedMessage;
+import kafka.producer.ProducerConfig;
 import scala.actors.threadpool.Arrays;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({ "Apache", "Kafka", "Put", "Send", "Message", "PubSub" })
 @CapabilityDescription("Sends the contents of a FlowFile as a message to Apache Kafka")
 public class PutKafka extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
index 6c20a8f..6f126aa 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
@@ -18,18 +18,20 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
+import static org.apache.nifi.processor.util.StandardValidators.createLongValidator;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -53,11 +55,13 @@ import org.kitesdk.data.spi.DefaultConfiguration;
 import org.kitesdk.data.spi.filesystem.CSVFileReader;
 import org.kitesdk.data.spi.filesystem.CSVProperties;
 
-import static org.apache.nifi.processor.util.StandardValidators.createLongValidator;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
 
 @Tags({"kite", "csv", "avro"})
-@CapabilityDescription(
-        "Converts CSV files to Avro according to an Avro Schema")
+@InputRequirement(Requirement.INPUT_REQUIRED)
+@CapabilityDescription("Converts CSV files to Avro according to an Avro Schema")
 public class ConvertCSVToAvro extends AbstractKiteProcessor {
 
     private static final CSVProperties DEFAULTS = new CSVProperties.Builder().build();
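
Besides adding INPUT_REQUIRED, this hunk reorders imports so the static import of createLongValidator leads the file; that StandardValidators factory is the one behind the Lines to skip property shown earlier. A small sketch of the pattern with an illustrative property (not from the commit):

    import static org.apache.nifi.processor.util.StandardValidators.createLongValidator;

    import org.apache.nifi.components.PropertyDescriptor;

    final class RangedPropertySketch {
        // The third argument makes the [1, 1000] bounds inclusive, matching
        // the createLongValidator(0L, Integer.MAX_VALUE, true) call above.
        static final PropertyDescriptor MAX_ROWS = new PropertyDescriptor.Builder()
            .name("Max rows")
            .description("Upper bound on the number of CSV rows converted per FlowFile")
            .addValidator(createLongValidator(1L, 1000L, true))
            .defaultValue("1000")
            .build();

        private RangedPropertySketch() {
        }
    }

Values outside the range are rejected at configuration time, before the processor can be scheduled.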

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
index ec1503c..af120bf 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
@@ -18,18 +18,18 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -47,9 +47,13 @@ import org.kitesdk.data.SchemaNotFoundException;
 import org.kitesdk.data.spi.DefaultConfiguration;
 import org.kitesdk.data.spi.filesystem.JSONFileReader;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 @Tags({"kite", "json", "avro"})
-@CapabilityDescription(
-        "Converts JSON files to Avro according to an Avro Schema")
+@InputRequirement(Requirement.INPUT_REQUIRED)
+@CapabilityDescription("Converts JSON files to Avro according to an Avro Schema")
 public class ConvertJSONToAvro extends AbstractKiteProcessor {
 
     private static final Relationship SUCCESS = new Relationship.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
index 7a30db1..1986f0b 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
@@ -18,16 +18,17 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileStream;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -46,6 +47,10 @@ import org.kitesdk.data.ValidationException;
 import org.kitesdk.data.View;
 import org.kitesdk.data.spi.SchemaValidationUtil;
 
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"kite", "avro", "parquet", "hadoop", "hive", "hdfs", "hbase"})
 @CapabilityDescription("Stores Avro records in a Kite dataset")
 public class StoreInKiteDataset extends AbstractKiteProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java b/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
index 8398152..5f58781 100644
--- a/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
+++ b/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
@@ -33,6 +33,8 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.MultivaluedMap;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -70,6 +72,7 @@ import com.sun.jersey.api.json.JSONConfiguration;
 import com.sun.jersey.core.util.MultivaluedMapImpl;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"yandex", "translate", "translation", "language"})
 @CapabilityDescription("Translates content and attributes from one language to another")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore b/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
new file mode 100644
index 0000000..ae3c172
--- /dev/null
+++ b/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
@@ -0,0 +1 @@
+/bin/

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
index a78b112..e41b583 100644
--- a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
+++ b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
@@ -32,6 +32,8 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -57,8 +59,8 @@ import org.apache.nifi.processor.util.StandardValidators;
 import com.twitter.hbc.ClientBuilder;
 import com.twitter.hbc.core.Client;
 import com.twitter.hbc.core.Constants;
-import com.twitter.hbc.core.endpoint.Location.Coordinate ;
 import com.twitter.hbc.core.endpoint.Location ;
+import com.twitter.hbc.core.endpoint.Location.Coordinate ;
 import com.twitter.hbc.core.endpoint.StatusesFilterEndpoint;
 import com.twitter.hbc.core.endpoint.StatusesFirehoseEndpoint;
 import com.twitter.hbc.core.endpoint.StatusesSampleEndpoint;
@@ -69,6 +71,7 @@ import com.twitter.hbc.httpclient.auth.Authentication;
 import com.twitter.hbc.httpclient.auth.OAuth1;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"twitter", "tweets", "social media", "status", "json"})
 @CapabilityDescription("Pulls status changes from Twitter's streaming API")
 @WritesAttribute(attribute = "mime.type", description = "Sets mime type to application/json")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
index ff264a1..a85aa0f 100644
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
@@ -18,7 +18,29 @@
  */
 package org.apache.nifi.processors.solr;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
 import org.apache.commons.io.IOUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnRemoved;
@@ -41,27 +63,8 @@ import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Properties;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-
 @Tags({"Apache", "Solr", "Get", "Pull"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Queries Solr and outputs the results as a FlowFile")
 public class GetSolr extends SolrProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
index 560ad34..df034c9 100644
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
@@ -18,7 +18,24 @@
  */
 package org.apache.nifi.processors.solr;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -40,22 +57,8 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.MultiMapSolrParams;
 import org.apache.solr.common.util.ContentStreamBase;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.concurrent.TimeUnit;
-
 @Tags({"Apache", "Solr", "Put", "Send"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Sends the contents of a FlowFile as a ContentStream to Solr")
 @DynamicProperty(name="A Solr request parameter name", value="A Solr request parameter value",
         description="These parameters will be passed to Solr on the request")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
index 9887e38..816b407 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
@@ -29,6 +29,8 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.codec.binary.Base64InputStream;
 import org.apache.commons.codec.binary.Base64OutputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -51,101 +53,102 @@ import org.apache.nifi.util.StopWatch;
 @SupportsBatching
 @Tags({"encode", "base64"})
 @CapabilityDescription("Encodes or decodes content to and from base64")
+@InputRequirement(Requirement.INPUT_REQUIRED)
 public class Base64EncodeContent extends AbstractProcessor {
 
-    public static final String ENCODE_MODE = "Encode";
-    public static final String DECODE_MODE = "Decode";
+	public static final String ENCODE_MODE = "Encode";
+	public static final String DECODE_MODE = "Decode";
 
-    public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
-            .name("Mode")
-            .description("Specifies whether the content should be encoded or decoded")
-            .required(true)
-            .allowableValues(ENCODE_MODE, DECODE_MODE)
-            .defaultValue(ENCODE_MODE)
-            .build();
-    public static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("Any FlowFile that is successfully encoded or decoded will be routed to success")
-            .build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder()
-            .name("failure")
-            .description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
-            .build();
+	public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
+		.name("Mode")
+		.description("Specifies whether the content should be encoded or decoded")
+		.required(true)
+		.allowableValues(ENCODE_MODE, DECODE_MODE)
+		.defaultValue(ENCODE_MODE)
+		.build();
+	public static final Relationship REL_SUCCESS = new Relationship.Builder()
+		.name("success")
+		.description("Any FlowFile that is successfully encoded or decoded will be routed to success")
+		.build();
+	public static final Relationship REL_FAILURE = new Relationship.Builder()
+		.name("failure")
+		.description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
+		.build();
 
-    private List<PropertyDescriptor> properties;
-    private Set<Relationship> relationships;
+	private List<PropertyDescriptor> properties;
+	private Set<Relationship> relationships;
 
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(MODE);
-        this.properties = Collections.unmodifiableList(properties);
+	@Override
+	protected void init(final ProcessorInitializationContext context) {
+		final List<PropertyDescriptor> properties = new ArrayList<>();
+		properties.add(MODE);
+		this.properties = Collections.unmodifiableList(properties);
 
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_FAILURE);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
+		final Set<Relationship> relationships = new HashSet<>();
+		relationships.add(REL_SUCCESS);
+		relationships.add(REL_FAILURE);
+		this.relationships = Collections.unmodifiableSet(relationships);
+	}
 
-    @Override
-    public Set<Relationship> getRelationships() {
-        return relationships;
-    }
+	@Override
+	public Set<Relationship> getRelationships() {
+		return relationships;
+	}
 
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
-    }
+	@Override
+	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+		return properties;
+	}
 
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSession session) {
-        FlowFile flowFile = session.get();
-        if (flowFile == null) {
-            return;
-        }
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSession session) {
+		FlowFile flowFile = session.get();
+		if (flowFile == null) {
+			return;
+		}
 
-        final ProcessorLog logger = getLogger();
+		final ProcessorLog logger = getLogger();
 
-        boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
-        try {
-            final StopWatch stopWatch = new StopWatch(true);
-            if (encode) {
-                flowFile = session.write(flowFile, new StreamCallback() {
-                    @Override
-                    public void process(InputStream in, OutputStream out) throws IOException {
-                        try (Base64OutputStream bos = new Base64OutputStream(out)) {
-                            int len = -1;
-                            byte[] buf = new byte[8192];
-                            while ((len = in.read(buf)) > 0) {
-                                bos.write(buf, 0, len);
-                            }
-                            bos.flush();
-                        }
-                    }
-                });
-            } else {
-                flowFile = session.write(flowFile, new StreamCallback() {
-                    @Override
-                    public void process(InputStream in, OutputStream out) throws IOException {
-                        try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
-                            int len = -1;
-                            byte[] buf = new byte[8192];
-                            while ((len = bis.read(buf)) > 0) {
-                                out.write(buf, 0, len);
-                            }
-                            out.flush();
-                        }
-                    }
-                });
-            }
+		boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
+		try {
+			final StopWatch stopWatch = new StopWatch(true);
+			if (encode) {
+				flowFile = session.write(flowFile, new StreamCallback() {
+					@Override
+					public void process(InputStream in, OutputStream out) throws IOException {
+						try (Base64OutputStream bos = new Base64OutputStream(out)) {
+							int len = -1;
+							byte[] buf = new byte[8192];
+							while ((len = in.read(buf)) > 0) {
+								bos.write(buf, 0, len);
+							}
+							bos.flush();
+						}
+					}
+				});
+			} else {
+				flowFile = session.write(flowFile, new StreamCallback() {
+					@Override
+					public void process(InputStream in, OutputStream out) throws IOException {
+						try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
+							int len = -1;
+							byte[] buf = new byte[8192];
+							while ((len = bis.read(buf)) > 0) {
+								out.write(buf, 0, len);
+							}
+							out.flush();
+						}
+					}
+				});
+			}
 
-            logger.info("Successfully {} {}", new Object[]{encode ? "encoded" : "decoded", flowFile});
-            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
-            session.transfer(flowFile, REL_SUCCESS);
-        } catch (ProcessException e) {
-            logger.error("Failed to {} {} due to {}", new Object[]{encode ? "encode" : "decode", flowFile, e});
-            session.transfer(flowFile, REL_FAILURE);
-        }
-    }
+			logger.info("Successfully {} {}", new Object[]{encode ? "encoded" : "decoded", flowFile});
+			session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+			session.transfer(flowFile, REL_SUCCESS);
+		} catch (ProcessException e) {
+			logger.error("Failed to {} {} due to {}", new Object[]{encode ? "encode" : "decode", flowFile, e});
+			session.transfer(flowFile, REL_FAILURE);
+		}
+	}
 
 }
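
As a quick illustration (not part of the commit), a processor like this can be exercised with NiFi's mock framework. The sketch below assumes the nifi-mock TestRunner API and the default Commons Codec Base64 settings (no line breaks in the encoded output):

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.util.MockFlowFile;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;

    public class Base64EncodeContentExample {
        public static void main(final String[] args) {
            // Stand up the processor in the mock framework and select Encode mode.
            final TestRunner runner = TestRunners.newTestRunner(Base64EncodeContent.class);
            runner.setProperty(Base64EncodeContent.MODE, Base64EncodeContent.ENCODE_MODE);

            // Enqueue a FlowFile containing "hello" and trigger the processor once.
            runner.enqueue("hello".getBytes(StandardCharsets.UTF_8));
            runner.run();

            // The encoded FlowFile should be routed to 'success' with Base64 content.
            runner.assertAllFlowFilesTransferred(Base64EncodeContent.REL_SUCCESS, 1);
            final MockFlowFile out = runner.getFlowFilesForRelationship(Base64EncodeContent.REL_SUCCESS).get(0);
            out.assertContentEquals("aGVsbG8=");
        }
    }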

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
index 1b9b20c..593cf44 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
@@ -29,20 +29,18 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
-import lzma.sdk.lzma.Decoder;
-import lzma.streams.LzmaInputStream;
-import lzma.streams.LzmaOutputStream;
-
 import org.apache.commons.compress.compressors.CompressorStreamFactory;
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
 import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -63,9 +61,14 @@ import org.tukaani.xz.LZMA2Options;
 import org.tukaani.xz.XZInputStream;
 import org.tukaani.xz.XZOutputStream;
 
+import lzma.sdk.lzma.Decoder;
+import lzma.streams.LzmaInputStream;
+import lzma.streams.LzmaOutputStream;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"content", "compress", "decompress", "gzip", "bzip2", "lzma", "xz-lzma2"})
 @CapabilityDescription("Compresses or decompresses the contents of FlowFiles using a user-specified compression algorithm and updates the mime.type "
     + "attribute as appropriate")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
index 2efc852..a45c211 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
@@ -31,6 +31,12 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.TriggerSerially;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -43,10 +49,6 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.timebuffer.EntityAccess;
@@ -54,344 +56,345 @@ import org.apache.nifi.util.timebuffer.TimedBuffer;
 
 @SideEffectFree
 @TriggerSerially
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"rate control", "throttle", "rate", "throughput"})
 @CapabilityDescription("Controls the rate at which data is transferred to follow-on processors.")
 public class ControlRate extends AbstractProcessor {
 
-    public static final String DATA_RATE = "data rate";
-    public static final String FLOWFILE_RATE = "flowfile count";
-    public static final String ATTRIBUTE_RATE = "attribute value";
-
-    public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
-            .name("Rate Control Criteria")
-            .description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
-            .required(true)
-            .allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
-            .defaultValue(DATA_RATE)
-            .build();
-    public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
-            .name("Maximum Rate")
-            .description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
-                    + "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
-            .required(true)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
-            .build();
-    public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-            .name("Rate Controlled Attribute")
-            .description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
-                    + "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
-                    + "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
-            .required(false)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .expressionLanguageSupported(false)
-            .build();
-    public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
-            .name("Time Duration")
-            .description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
-            .required(true)
-            .addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
-            .defaultValue("1 min")
-            .build();
-    public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-            .name("Grouping Attribute")
-            .description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
-                    + "each value specified by the attribute with this name. Changing this value resets the rate counters.")
-            .required(false)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .expressionLanguageSupported(false)
-            .build();
-
-    public static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("All FlowFiles are transferred to this relationship")
-            .build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder()
-            .name("failure")
-            .description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
-            .build();
-
-    private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
-    private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
-
-    private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
-    private List<PropertyDescriptor> properties;
-    private Set<Relationship> relationships;
-    private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
-
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(RATE_CONTROL_CRITERIA);
-        properties.add(MAX_RATE);
-        properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
-        properties.add(TIME_PERIOD);
-        properties.add(GROUPING_ATTRIBUTE_NAME);
-        this.properties = Collections.unmodifiableList(properties);
-
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
-    }
-
-    @Override
-    public Set<Relationship> getRelationships() {
-        return relationships;
-    }
-
-    @Override
-    protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
-
-        final Validator rateValidator;
-        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-            case DATA_RATE:
-                rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
-                break;
-            case ATTRIBUTE_RATE:
-                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-                final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-                if (rateAttr == null) {
-                    validationResults.add(new ValidationResult.Builder()
-                            .subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
-                            .explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
-                            .build());
-                }
-                break;
-            case FLOWFILE_RATE:
-            default:
-                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-                break;
-        }
-
-        final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
-        if (!rateResult.isValid()) {
-            validationResults.add(rateResult);
-        }
-
-        return validationResults;
-    }
-
-    @Override
-    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
-        super.onPropertyModified(descriptor, oldValue, newValue);
-
-        if (descriptor.equals(RATE_CONTROL_CRITERIA)
-                || descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
-                || descriptor.equals(GROUPING_ATTRIBUTE_NAME)
-                || descriptor.equals(TIME_PERIOD)) {
-            // if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
-            throttleMap.clear();
-        } else if (descriptor.equals(MAX_RATE)) {
-            final long newRate;
-            if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
-                newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
-            } else {
-                newRate = Long.parseLong(newValue);
-            }
-
-            for (final Throttle throttle : throttleMap.values()) {
-                throttle.setMaxRate(newRate);
-            }
-        }
-    }
-
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
-        final long lastClearTime = lastThrottleClearTime.get();
-        final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
-        if (lastClearTime < throttleExpirationMillis) {
-            if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
-                final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
-                while (itr.hasNext()) {
-                    final Map.Entry<String, Throttle> entry = itr.next();
-                    final Throttle throttle = entry.getValue();
-                    if (throttle.tryLock()) {
-                        try {
-                            if (throttle.lastUpdateTime() < lastClearTime) {
-                                itr.remove();
-                            }
-                        } finally {
-                            throttle.unlock();
-                        }
-                    }
-                }
-            }
-        }
-
-        // TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
-        FlowFile flowFile = session.get();
-        if (flowFile == null) {
-            return;
-        }
-
-        final ProcessorLog logger = getLogger();
-        final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
-        final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-        long rateValue;
-        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-            case DATA_RATE:
-                rateValue = flowFile.getSize();
-                break;
-            case FLOWFILE_RATE:
-                rateValue = 1;
-                break;
-            case ATTRIBUTE_RATE:
-                final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
-                if (attributeValue == null) {
-                    logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[]{flowFile, rateControlAttributeName});
-                    session.transfer(flowFile, REL_FAILURE);
-                    return;
-                }
-
-                if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
-                    logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
-                            new Object[]{flowFile, rateControlAttributeName, attributeValue});
-                    session.transfer(flowFile, REL_FAILURE);
-                    return;
-                }
-                rateValue = Long.parseLong(attributeValue);
-                break;
-            default:
-                throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
-        }
-
-        final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
-        final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
-        Throttle throttle = throttleMap.get(groupName);
-        if (throttle == null) {
-            throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
-
-            final String maxRateValue = context.getProperty(MAX_RATE).getValue();
-            final long newRate;
-            if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
-                newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
-            } else {
-                newRate = Long.parseLong(maxRateValue);
-            }
-            throttle.setMaxRate(newRate);
-
-            throttleMap.put(groupName, throttle);
-        }
-
-        throttle.lock();
-        try {
-            if (throttle.tryAdd(rateValue)) {
-                logger.info("transferring {} to 'success'", new Object[]{flowFile});
-                session.transfer(flowFile, REL_SUCCESS);
-            } else {
-                flowFile = session.penalize(flowFile);
-                session.transfer(flowFile);
-            }
-        } finally {
-            throttle.unlock();
-        }
-    }
-
-    private static class TimestampedLong {
-
-        private final Long value;
-        private final long timestamp = System.currentTimeMillis();
-
-        public TimestampedLong(final Long value) {
-            this.value = value;
-        }
-
-        public Long getValue() {
-            return value;
-        }
-
-        public long getTimestamp() {
-            return timestamp;
-        }
-    }
-
-    private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
-
-        @Override
-        public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
-            if (oldValue == null && toAdd == null) {
-                return new TimestampedLong(0L);
-            } else if (oldValue == null) {
-                return toAdd;
-            } else if (toAdd == null) {
-                return oldValue;
-            }
-
-            return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
-        }
-
-        @Override
-        public TimestampedLong createNew() {
-            return new TimestampedLong(0L);
-        }
-
-        @Override
-        public long getTimestamp(TimestampedLong entity) {
-            return entity == null ? 0L : entity.getTimestamp();
-        }
-    }
-
-    private static class Throttle extends ReentrantLock {
-
-        private final AtomicLong maxRate = new AtomicLong(1L);
-        private final long timePeriodValue;
-        private final TimeUnit timePeriodUnit;
-        private final TimedBuffer<TimestampedLong> timedBuffer;
-        private final ProcessorLog logger;
-
-        private volatile long penalizationExpired;
-        private volatile long lastUpdateTime;
-
-        public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
-            this.timePeriodUnit = unit;
-            this.timePeriodValue = timePeriod;
-            this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
-            this.logger = logger;
-        }
-
-        public void setMaxRate(final long maxRate) {
-            this.maxRate.set(maxRate);
-        }
-
-        public long lastUpdateTime() {
-            return lastUpdateTime;
-        }
-
-        public boolean tryAdd(final long value) {
-            final long now = System.currentTimeMillis();
-            if (penalizationExpired > now) {
-                return false;
-            }
-
-            final long maxRateValue = maxRate.get();
-
-            final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
-            if (sum != null && sum.getValue() >= maxRateValue) {
-                logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[]{sum.getValue(), value});
-                return false;
-            }
-
-            logger.debug("current sum for throttle is {}, so allowing rate of {} through",
-                    new Object[]{sum == null ? 0 : sum.getValue(), value});
-
-            final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
-            if (transferred > maxRateValue) {
-                final long amountOver = transferred - maxRateValue;
-                // determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
-                final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
-                final double pct = (double) amountOver / (double) maxRateValue;
-                final long penalizationPeriod = (long) (milliDuration * pct);
-                this.penalizationExpired = now + penalizationPeriod;
-                logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[]{value, penalizationPeriod});
-            }
-
-            lastUpdateTime = now;
-            return true;
-        }
-    }
+	public static final String DATA_RATE = "data rate";
+	public static final String FLOWFILE_RATE = "flowfile count";
+	public static final String ATTRIBUTE_RATE = "attribute value";
+
+	public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
+		.name("Rate Control Criteria")
+		.description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
+		.required(true)
+		.allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
+		.defaultValue(DATA_RATE)
+		.build();
+	public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
+		.name("Maximum Rate")
+		.description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
+			+ "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
+		.required(true)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
+		.build();
+	public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+		.name("Rate Controlled Attribute")
+		.description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
+			+ "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
+			+ "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
+		.required(false)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+		.expressionLanguageSupported(false)
+		.build();
+	public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
+		.name("Time Duration")
+		.description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
+		.required(true)
+		.addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
+		.defaultValue("1 min")
+		.build();
+	public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+		.name("Grouping Attribute")
+		.description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
+			+ "each value specified by the attribute with this name. Changing this value resets the rate counters.")
+		.required(false)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+		.expressionLanguageSupported(false)
+		.build();
+
+	public static final Relationship REL_SUCCESS = new Relationship.Builder()
+		.name("success")
+		.description("All FlowFiles are transferred to this relationship")
+		.build();
+	public static final Relationship REL_FAILURE = new Relationship.Builder()
+		.name("failure")
+		.description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
+		.build();
+
+	private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
+	private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
+
+	private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
+	private List<PropertyDescriptor> properties;
+	private Set<Relationship> relationships;
+	private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
+
+	@Override
+	protected void init(final ProcessorInitializationContext context) {
+		final List<PropertyDescriptor> properties = new ArrayList<>();
+		properties.add(RATE_CONTROL_CRITERIA);
+		properties.add(MAX_RATE);
+		properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
+		properties.add(TIME_PERIOD);
+		properties.add(GROUPING_ATTRIBUTE_NAME);
+		this.properties = Collections.unmodifiableList(properties);
+
+		final Set<Relationship> relationships = new HashSet<>();
+		relationships.add(REL_SUCCESS);
+		this.relationships = Collections.unmodifiableSet(relationships);
+	}
+
+	@Override
+	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+		return properties;
+	}
+
+	@Override
+	public Set<Relationship> getRelationships() {
+		return relationships;
+	}
+
+	@Override
+	protected Collection<ValidationResult> customValidate(final ValidationContext context) {
+		final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
+
+		final Validator rateValidator;
+		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+			case DATA_RATE:
+				rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
+				break;
+			case ATTRIBUTE_RATE:
+				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+				final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+				if (rateAttr == null) {
+					validationResults.add(new ValidationResult.Builder()
+						.subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
+						.explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
+						.build());
+				}
+				break;
+			case FLOWFILE_RATE:
+			default:
+				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+				break;
+		}
+
+		final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
+		if (!rateResult.isValid()) {
+			validationResults.add(rateResult);
+		}
+
+		return validationResults;
+	}
+
+	@Override
+	public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
+		super.onPropertyModified(descriptor, oldValue, newValue);
+
+		if (descriptor.equals(RATE_CONTROL_CRITERIA)
+			|| descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
+			|| descriptor.equals(GROUPING_ATTRIBUTE_NAME)
+			|| descriptor.equals(TIME_PERIOD)) {
+			// if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
+			throttleMap.clear();
+		} else if (descriptor.equals(MAX_RATE)) {
+			final long newRate;
+			if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
+				newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
+			} else {
+				newRate = Long.parseLong(newValue);
+			}
+
+			for (final Throttle throttle : throttleMap.values()) {
+				throttle.setMaxRate(newRate);
+			}
+		}
+	}
+
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
+		final long lastClearTime = lastThrottleClearTime.get();
+		final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
+		if (lastClearTime < throttleExpirationMillis) {
+			if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
+				final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
+				while (itr.hasNext()) {
+					final Map.Entry<String, Throttle> entry = itr.next();
+					final Throttle throttle = entry.getValue();
+					if (throttle.tryLock()) {
+						try {
+							if (throttle.lastUpdateTime() < lastClearTime) {
+								itr.remove();
+							}
+						} finally {
+							throttle.unlock();
+						}
+					}
+				}
+			}
+		}
+
+		// TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
+		FlowFile flowFile = session.get();
+		if (flowFile == null) {
+			return;
+		}
+
+		final ProcessorLog logger = getLogger();
+		final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
+		final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+		long rateValue;
+		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+			case DATA_RATE:
+				rateValue = flowFile.getSize();
+				break;
+			case FLOWFILE_RATE:
+				rateValue = 1;
+				break;
+			case ATTRIBUTE_RATE:
+				final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
+				if (attributeValue == null) {
+					logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[]{flowFile, rateControlAttributeName});
+					session.transfer(flowFile, REL_FAILURE);
+					return;
+				}
+
+				if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
+					logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
+						new Object[]{flowFile, rateControlAttributeName, attributeValue});
+					session.transfer(flowFile, REL_FAILURE);
+					return;
+				}
+				rateValue = Long.parseLong(attributeValue);
+				break;
+			default:
+				throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
+		}
+
+		final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
+		final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
+		Throttle throttle = throttleMap.get(groupName);
+		if (throttle == null) {
+			throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
+
+			final String maxRateValue = context.getProperty(MAX_RATE).getValue();
+			final long newRate;
+			if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
+				newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
+			} else {
+				newRate = Long.parseLong(maxRateValue);
+			}
+			throttle.setMaxRate(newRate);
+
+			throttleMap.put(groupName, throttle);
+		}
+
+		throttle.lock();
+		try {
+			if (throttle.tryAdd(rateValue)) {
+				logger.info("transferring {} to 'success'", new Object[]{flowFile});
+				session.transfer(flowFile, REL_SUCCESS);
+			} else {
+				flowFile = session.penalize(flowFile);
+				session.transfer(flowFile);
+			}
+		} finally {
+			throttle.unlock();
+		}
+	}
+
+	private static class TimestampedLong {
+
+		private final Long value;
+		private final long timestamp = System.currentTimeMillis();
+
+		public TimestampedLong(final Long value) {
+			this.value = value;
+		}
+
+		public Long getValue() {
+			return value;
+		}
+
+		public long getTimestamp() {
+			return timestamp;
+		}
+	}
+
+	private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
+
+		@Override
+		public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
+			if (oldValue == null && toAdd == null) {
+				return new TimestampedLong(0L);
+			} else if (oldValue == null) {
+				return toAdd;
+			} else if (toAdd == null) {
+				return oldValue;
+			}
+
+			return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
+		}
+
+		@Override
+		public TimestampedLong createNew() {
+			return new TimestampedLong(0L);
+		}
+
+		@Override
+		public long getTimestamp(TimestampedLong entity) {
+			return entity == null ? 0L : entity.getTimestamp();
+		}
+	}
+
+	private static class Throttle extends ReentrantLock {
+
+		private final AtomicLong maxRate = new AtomicLong(1L);
+		private final long timePeriodValue;
+		private final TimeUnit timePeriodUnit;
+		private final TimedBuffer<TimestampedLong> timedBuffer;
+		private final ProcessorLog logger;
+
+		private volatile long penalizationExpired;
+		private volatile long lastUpdateTime;
+
+		public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
+			this.timePeriodUnit = unit;
+			this.timePeriodValue = timePeriod;
+			this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
+			this.logger = logger;
+		}
+
+		public void setMaxRate(final long maxRate) {
+			this.maxRate.set(maxRate);
+		}
+
+		public long lastUpdateTime() {
+			return lastUpdateTime;
+		}
+
+		public boolean tryAdd(final long value) {
+			final long now = System.currentTimeMillis();
+			if (penalizationExpired > now) {
+				return false;
+			}
+
+			final long maxRateValue = maxRate.get();
+
+			final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
+			if (sum != null && sum.getValue() >= maxRateValue) {
+				logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[]{sum.getValue(), value});
+				return false;
+			}
+
+			logger.debug("current sum for throttle is {}, so allowing rate of {} through",
+				new Object[]{sum == null ? 0 : sum.getValue(), value});
+
+			final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
+			if (transferred > maxRateValue) {
+				final long amountOver = transferred - maxRateValue;
+				// determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
+				final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
+				final double pct = (double) amountOver / (double) maxRateValue;
+				final long penalizationPeriod = (long) (milliDuration * pct);
+				this.penalizationExpired = now + penalizationPeriod;
+				logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[]{value, penalizationPeriod});
+			}
+
+			lastUpdateTime = now;
+			return true;
+		}
+	}
 }
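
To make the penalization logic in Throttle.tryAdd concrete, here is a worked example with illustrative numbers: with a Rate Control Criteria of 'flowfile count', a Maximum Rate of 100, and a Time Duration of 1 minute (60,000 ms), suppose a burst pushes the aggregated sum to 150. Then amountOver = 150 - 100 = 50, pct = 50 / 100 = 0.5, and penalizationPeriod = 60,000 * 0.5 = 30,000 ms, so tryAdd returns false for the next 30 seconds and incoming FlowFiles are penalized and routed back to their input queue rather than transferred to 'success'.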

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
index a0a1364..7a99a59 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
@@ -33,8 +33,10 @@ import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.StreamCallback;
@@ -76,6 +78,7 @@ import java.util.concurrent.TimeUnit;
  */
 @EventDriven
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @SupportsBatching
 @Tags({"text", "convert", "characterset", "character set"})
 @CapabilityDescription("Converts a FlowFile's content from one character set to another")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
index 7eda593..9591960 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
@@ -34,10 +34,12 @@ import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -64,6 +66,7 @@ import org.codehaus.jackson.node.JsonNodeFactory;
 @SideEffectFree
 @SupportsBatching
 @SeeAlso(PutSQL.class)
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"json", "sql", "database", "rdbms", "insert", "update", "relational", "flat"})
 @CapabilityDescription("Converts a JSON-formatted FlowFile into an UPDATE or INSERT SQL statement. The incoming FlowFile is expected to be "
         + "\"flat\" JSON message, meaning that it consists of a single JSON element and each field maps to a simple type. If a field maps to "


[16/19] nifi git commit: NIFI-810: Addressed several checkstyle violations

Posted by ma...@apache.org.
NIFI-810: Addressed several checkstyle violations


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/ccfb57fe
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/ccfb57fe
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/ccfb57fe

Branch: refs/heads/master
Commit: ccfb57fe9ff43f11319dcb1625bfc78b1d88f56a
Parents: b974445
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Oct 7 17:48:51 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:48:51 2015 -0400

----------------------------------------------------------------------
 .../annotation/behavior/InputRequirement.java   |  70 +-
 .../nifi/processors/aws/s3/PutS3Object.java     |  46 +-
 .../apache/nifi/controller/ProcessorNode.java   |  88 +--
 .../nifi/controller/StandardProcessorNode.java  |  10 +-
 .../standard/Base64EncodeContent.java           | 168 ++---
 .../nifi/processors/standard/ControlRate.java   | 672 +++++++++----------
 6 files changed, 534 insertions(+), 520 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
----------------------------------------------------------------------
diff --git a/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
index 97e6b88..13f442c 100644
--- a/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
+++ b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.nifi.annotation.behavior;
 
 import java.lang.annotation.Documented;
@@ -21,31 +37,31 @@ import java.lang.annotation.Target;
 @Retention(RetentionPolicy.RUNTIME)
 @Inherited
 public @interface InputRequirement {
-	Requirement value();
-	
-	public static enum Requirement {
-		/**
-		 * This value is used to indicate that the Processor requires input from other Processors
-		 * in order to run. As a result, the Processor will not be valid if it does not have any
-		 * incoming connections.
-		 */
-		INPUT_REQUIRED,
-		
-		/**
-		 * This value is used to indicate that the Processor will consume data from an incoming
-		 * connection but does not require an incoming connection in order to perform its task.
-		 * If the {@link InputRequirement} annotation is not present, this is the default value
-		 * that is used.
-		 */
-		INPUT_ALLOWED,
-		
-		/**
-		 * This value is used to indicate that the Processor is a "Source Processor" and does
-		 * not accept incoming connections. Because the Processor does not pull FlowFiles from
-		 * an incoming connection, it can be very confusing for users who create incoming connections
-		 * to the Processor. As a result, this value can be used in order to clarify that incoming
-		 * connections will not be used. This prevents the user from even creating such a connection.
-		 */
-		INPUT_FORBIDDEN;
-	}
+    Requirement value();
+
+    public static enum Requirement {
+        /**
+         * This value is used to indicate that the Processor requires input from other Processors
+         * in order to run. As a result, the Processor will not be valid if it does not have any
+         * incoming connections.
+         */
+        INPUT_REQUIRED,
+
+        /**
+         * This value is used to indicate that the Processor will consume data from an incoming
+         * connection but does not require an incoming connection in order to perform its task.
+         * If the {@link InputRequirement} annotation is not present, this is the default value
+         * that is used.
+         */
+        INPUT_ALLOWED,
+
+        /**
+         * This value is used to indicate that the Processor is a "Source Processor" and does
+         * not accept incoming connections. Because the Processor does not pull FlowFiles from
+         * an incoming connection, it can be very confusing for users who create incoming connections
+         * to the Processor. As a result, this value can be used in order to clarify that incoming
+         * connections will not be used. This prevents the user from even creating such a connection.
+         */
+        INPUT_FORBIDDEN;
+    }
 }
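
For reference, a processor opts in by placing the annotation on its class. The following is an illustrative sketch (the class is hypothetical, not part of this commit) of a source processor that must never have incoming connections:

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.exception.ProcessException;

    // Because of INPUT_FORBIDDEN, the framework prevents users from drawing
    // connections into this processor; it is expected to generate its own data.
    @InputRequirement(Requirement.INPUT_FORBIDDEN)
    public class ExampleSourceProcessor extends AbstractProcessor {

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            // A real source processor would create and transfer FlowFiles here.
        }
    }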

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
index 7398c4e..c7212f5 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
@@ -59,10 +59,8 @@ import com.amazonaws.services.s3.model.StorageClass;
 @InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "S3", "AWS", "Archive", "Put"})
 @CapabilityDescription("Puts FlowFiles to an Amazon S3 Bucket")
-@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
-        value = "The value of a User-Defined Metadata field to add to the S3 Object",
-        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
-        supportsExpressionLanguage = true)
+@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object", value = "The value of a User-Defined Metadata field to add to the S3 Object",
+    description = "Allows user-defined metadata to be added to the S3 object as key/value pairs", supportsExpressionLanguage = true)
 @ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
 @WritesAttributes({
     @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object that was put to S3"),
@@ -72,22 +70,22 @@ import com.amazonaws.services.s3.model.StorageClass;
 public class PutS3Object extends AbstractS3Processor {
 
     public static final PropertyDescriptor EXPIRATION_RULE_ID = new PropertyDescriptor.Builder()
-            .name("Expiration Time Rule")
-            .required(false)
-            .expressionLanguageSupported(true)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .build();
+        .name("Expiration Time Rule")
+        .required(false)
+        .expressionLanguageSupported(true)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+        .build();
 
     public static final PropertyDescriptor STORAGE_CLASS = new PropertyDescriptor.Builder()
-            .name("Storage Class")
-            .required(true)
-            .allowableValues(StorageClass.Standard.name(), StorageClass.ReducedRedundancy.name())
-            .defaultValue(StorageClass.Standard.name())
-            .build();
+        .name("Storage Class")
+        .required(true)
+        .allowableValues(StorageClass.Standard.name(), StorageClass.ReducedRedundancy.name())
+        .defaultValue(StorageClass.Standard.name())
+        .build();
 
     public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
-            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, STORAGE_CLASS, REGION, TIMEOUT, EXPIRATION_RULE_ID,
-                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
+        Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, STORAGE_CLASS, REGION, TIMEOUT, EXPIRATION_RULE_ID,
+            FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
@@ -97,15 +95,15 @@ public class PutS3Object extends AbstractS3Processor {
     @Override
     protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
         return new PropertyDescriptor.Builder()
-                .name(propertyDescriptorName)
-                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-                .expressionLanguageSupported(true)
-                .dynamic(true)
-                .build();
+            .name(propertyDescriptorName)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .dynamic(true)
+            .build();
     }
 
     @Override
-	public void onTrigger(final ProcessContext context, final ProcessSession session) {
+    public void onTrigger(final ProcessContext context, final ProcessSession session) {
         FlowFile flowFile = session.get();
         if (flowFile == null) {
             return;
@@ -176,9 +174,9 @@ public class PutS3Object extends AbstractS3Processor {
             final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
             session.getProvenanceReporter().send(flowFile, url, millis);
 
-            getLogger().info("Successfully put {} to Amazon S3 in {} milliseconds", new Object[]{ff, millis});
+            getLogger().info("Successfully put {} to Amazon S3 in {} milliseconds", new Object[] {ff, millis});
         } catch (final ProcessException | AmazonClientException pe) {
-            getLogger().error("Failed to put {} to Amazon S3 due to {}", new Object[]{flowFile, pe});
+            getLogger().error("Failed to put {} to Amazon S3 due to {}", new Object[] {flowFile, pe});
             session.transfer(flowFile, REL_FAILURE);
         }
     }
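
The dynamic-property behavior above can be shown with a short configuration sketch (property values are hypothetical and nothing is sent to a real bucket):

    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;

    public class PutS3ObjectConfigExample {
        public static void main(final String[] args) {
            final TestRunner runner = TestRunners.newTestRunner(PutS3Object.class);
            runner.setProperty(PutS3Object.BUCKET, "example-bucket"); // hypothetical bucket name

            // Any property name not in the supported list is handled by
            // getSupportedDynamicPropertyDescriptor above and becomes
            // user-defined metadata on the S3 object.
            runner.setProperty("project", "nifi-demo");
        }
    }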

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
index 2f72d0f..d340c77 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
@@ -31,72 +31,72 @@ import org.apache.nifi.scheduling.SchedulingStrategy;
 
 public abstract class ProcessorNode extends AbstractConfiguredComponent implements Connectable {
 
-	public ProcessorNode(final Processor processor, final String id,
-		final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
-		super(processor, id, validationContextFactory, serviceProvider);
-	}
+    public ProcessorNode(final Processor processor, final String id,
+        final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
+        super(processor, id, validationContextFactory, serviceProvider);
+    }
 
-	public abstract boolean isIsolated();
+    public abstract boolean isIsolated();
 
-	public abstract boolean isTriggerWhenAnyDestinationAvailable();
+    public abstract boolean isTriggerWhenAnyDestinationAvailable();
 
-	@Override
-	public abstract boolean isSideEffectFree();
+    @Override
+    public abstract boolean isSideEffectFree();
 
-	public abstract boolean isTriggeredSerially();
+    public abstract boolean isTriggeredSerially();
 
-	public abstract boolean isEventDrivenSupported();
+    public abstract boolean isEventDrivenSupported();
 
-	public abstract boolean isHighThroughputSupported();
+    public abstract boolean isHighThroughputSupported();
 
-	public abstract Requirement getInputRequirement();
+    public abstract Requirement getInputRequirement();
 
-	@Override
-	public abstract boolean isValid();
+    @Override
+    public abstract boolean isValid();
 
-	public abstract void setScheduledState(ScheduledState scheduledState);
+    public abstract void setScheduledState(ScheduledState scheduledState);
 
-	public abstract void setBulletinLevel(LogLevel bulletinLevel);
+    public abstract void setBulletinLevel(LogLevel bulletinLevel);
 
-	public abstract LogLevel getBulletinLevel();
+    public abstract LogLevel getBulletinLevel();
 
-	public abstract Processor getProcessor();
+    public abstract Processor getProcessor();
 
-	public abstract void yield(long period, TimeUnit timeUnit);
+    public abstract void yield(long period, TimeUnit timeUnit);
 
-	public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
+    public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
 
-	public abstract Set<Relationship> getAutoTerminatedRelationships();
+    public abstract Set<Relationship> getAutoTerminatedRelationships();
 
-	public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
+    public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
 
-	@Override
-	public abstract SchedulingStrategy getSchedulingStrategy();
+    @Override
+    public abstract SchedulingStrategy getSchedulingStrategy();
 
-	public abstract void setRunDuration(long duration, TimeUnit timeUnit);
+    public abstract void setRunDuration(long duration, TimeUnit timeUnit);
 
-	public abstract long getRunDuration(TimeUnit timeUnit);
+    public abstract long getRunDuration(TimeUnit timeUnit);
 
-	public abstract Map<String, String> getStyle();
+    public abstract Map<String, String> getStyle();
 
-	public abstract void setStyle(Map<String, String> style);
+    public abstract void setStyle(Map<String, String> style);
 
-	/**
-	 * @return the number of threads (concurrent tasks) currently being used by
-	 * this Processor
-	 */
-	public abstract int getActiveThreadCount();
+    /**
+     * @return the number of threads (concurrent tasks) currently being used by
+     *         this Processor
+     */
+    public abstract int getActiveThreadCount();
 
-	/**
-	 * Verifies that this Processor can be started if the provided set of
-	 * services are enabled. This is introduced because we need to verify that
-	 * all components can be started before starting any of them. In order to do
-	 * that, we need to know that this component can be started if the given
-	 * services are enabled, as we will then enable the given services before
-	 * starting this component.
-	 *
-	 * @param ignoredReferences to ignore
-	 */
-	public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
+    /**
+     * Verifies that this Processor can be started if the provided set of
+     * services are enabled. This is introduced because we need to verify that
+     * all components can be started before starting any of them. In order to do
+     * that, we need to know that this component can be started if the given
+     * services are enabled, as we will then enable the given services before
+     * starting this component.
+     *
+     * @param ignoredReferences to ignore
+     */
+    public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
 
 }
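
The verifyCanStart javadoc above describes a two-phase start: verify every component first, then enable services and start processors. A minimal sketch of a caller honoring that contract (method and variable names here are illustrative, not actual framework code):

    import java.util.List;
    import java.util.Set;

    void verifyAllBeforeStarting(final List<ProcessorNode> nodes, final Set<ControllerServiceNode> servicesToEnable) {
        // Phase 1: confirm each node could start once the given services are
        // enabled; verifyCanStart is expected to throw if a node cannot.
        for (final ProcessorNode node : nodes) {
            node.verifyCanStart(servicesToEnable);
        }
        // Phase 2 (not shown): enable servicesToEnable, then start each node.
    }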

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index f69c510..ad22c6d 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -1306,9 +1306,9 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     @Override
-	public void verifyModifiable() throws IllegalStateException {
-		if (isRunning()) {
-			throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-		}
-	}>>>>>>>2215 bc848b7db395b2ca9ac7cc4dc10891393721
+    public void verifyModifiable() throws IllegalStateException {
+        if (isRunning()) {
+            throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+        }
+    }
 }
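
Besides fixing indentation, the verifyModifiable hunk above also removes a stray merge-conflict marker that had been committed after the closing brace. The method itself is the usual check-before-mutate guard: a hedged sketch of how a configuration setter might use it (the setter and field are illustrative, not actual framework code):

    public void setConcurrentTaskCount(final int tasks) {
        verifyModifiable(); // throws IllegalStateException if the Processor is running
        this.concurrentTaskCount = tasks;
    }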

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
index 816b407..db45109 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
@@ -56,99 +56,99 @@ import org.apache.nifi.util.StopWatch;
 @InputRequirement(Requirement.INPUT_REQUIRED)
 public class Base64EncodeContent extends AbstractProcessor {
 
-	public static final String ENCODE_MODE = "Encode";
-	public static final String DECODE_MODE = "Decode";
+    public static final String ENCODE_MODE = "Encode";
+    public static final String DECODE_MODE = "Decode";
 
-	public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
-		.name("Mode")
-		.description("Specifies whether the content should be encoded or decoded")
-		.required(true)
-		.allowableValues(ENCODE_MODE, DECODE_MODE)
-		.defaultValue(ENCODE_MODE)
-		.build();
-	public static final Relationship REL_SUCCESS = new Relationship.Builder()
-		.name("success")
-		.description("Any FlowFile that is successfully encoded or decoded will be routed to success")
-		.build();
-	public static final Relationship REL_FAILURE = new Relationship.Builder()
-		.name("failure")
-		.description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
-		.build();
+    public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
+        .name("Mode")
+        .description("Specifies whether the content should be encoded or decoded")
+        .required(true)
+        .allowableValues(ENCODE_MODE, DECODE_MODE)
+        .defaultValue(ENCODE_MODE)
+        .build();
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+        .name("success")
+        .description("Any FlowFile that is successfully encoded or decoded will be routed to success")
+        .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+        .name("failure")
+        .description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
+        .build();
 
-	private List<PropertyDescriptor> properties;
-	private Set<Relationship> relationships;
+    private List<PropertyDescriptor> properties;
+    private Set<Relationship> relationships;
 
-	@Override
-	protected void init(final ProcessorInitializationContext context) {
-		final List<PropertyDescriptor> properties = new ArrayList<>();
-		properties.add(MODE);
-		this.properties = Collections.unmodifiableList(properties);
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        final List<PropertyDescriptor> properties = new ArrayList<>();
+        properties.add(MODE);
+        this.properties = Collections.unmodifiableList(properties);
 
-		final Set<Relationship> relationships = new HashSet<>();
-		relationships.add(REL_SUCCESS);
-		relationships.add(REL_FAILURE);
-		this.relationships = Collections.unmodifiableSet(relationships);
-	}
+        final Set<Relationship> relationships = new HashSet<>();
+        relationships.add(REL_SUCCESS);
+        relationships.add(REL_FAILURE);
+        this.relationships = Collections.unmodifiableSet(relationships);
+    }
 
-	@Override
-	public Set<Relationship> getRelationships() {
-		return relationships;
-	}
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
 
-	@Override
-	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-		return properties;
-	}
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return properties;
+    }
 
-	@Override
-	public void onTrigger(final ProcessContext context, final ProcessSession session) {
-		FlowFile flowFile = session.get();
-		if (flowFile == null) {
-			return;
-		}
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSession session) {
+        FlowFile flowFile = session.get();
+        if (flowFile == null) {
+            return;
+        }
 
-		final ProcessorLog logger = getLogger();
+        final ProcessorLog logger = getLogger();
 
-		boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
-		try {
-			final StopWatch stopWatch = new StopWatch(true);
-			if (encode) {
-				flowFile = session.write(flowFile, new StreamCallback() {
-					@Override
-					public void process(InputStream in, OutputStream out) throws IOException {
-						try (Base64OutputStream bos = new Base64OutputStream(out)) {
-							int len = -1;
-							byte[] buf = new byte[8192];
-							while ((len = in.read(buf)) > 0) {
-								bos.write(buf, 0, len);
-							}
-							bos.flush();
-						}
-					}
-				});
-			} else {
-				flowFile = session.write(flowFile, new StreamCallback() {
-					@Override
-					public void process(InputStream in, OutputStream out) throws IOException {
-						try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
-							int len = -1;
-							byte[] buf = new byte[8192];
-							while ((len = bis.read(buf)) > 0) {
-								out.write(buf, 0, len);
-							}
-							out.flush();
-						}
-					}
-				});
-			}
+        boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
+        try {
+            final StopWatch stopWatch = new StopWatch(true);
+            if (encode) {
+                flowFile = session.write(flowFile, new StreamCallback() {
+                    @Override
+                    public void process(InputStream in, OutputStream out) throws IOException {
+                        try (Base64OutputStream bos = new Base64OutputStream(out)) {
+                            int len = -1;
+                            byte[] buf = new byte[8192];
+                            while ((len = in.read(buf)) > 0) {
+                                bos.write(buf, 0, len);
+                            }
+                            bos.flush();
+                        }
+                    }
+                });
+            } else {
+                flowFile = session.write(flowFile, new StreamCallback() {
+                    @Override
+                    public void process(InputStream in, OutputStream out) throws IOException {
+                        try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
+                            int len = -1;
+                            byte[] buf = new byte[8192];
+                            while ((len = bis.read(buf)) > 0) {
+                                out.write(buf, 0, len);
+                            }
+                            out.flush();
+                        }
+                    }
+                });
+            }
 
-			logger.info("Successfully {} {}", new Object[]{encode ? "encoded" : "decoded", flowFile});
-			session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
-			session.transfer(flowFile, REL_SUCCESS);
-		} catch (ProcessException e) {
-			logger.error("Failed to {} {} due to {}", new Object[]{encode ? "encode" : "decode", flowFile, e});
-			session.transfer(flowFile, REL_FAILURE);
-		}
-	}
+            logger.info("Successfully {} {}", new Object[] {encode ? "encoded" : "decoded", flowFile});
+            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.transfer(flowFile, REL_SUCCESS);
+        } catch (ProcessException e) {
+            logger.error("Failed to {} {} due to {}", new Object[] {encode ? "encode" : "decode", flowFile, e});
+            session.transfer(flowFile, REL_FAILURE);
+        }
+    }
 
 }
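
Both branches of onTrigger above stream through an 8 KB buffer rather than loading the whole FlowFile, so content of any size is encoded or decoded in constant memory. The same approach works standalone with commons-codec; a minimal sketch (file paths are illustrative):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    import org.apache.commons.codec.binary.Base64OutputStream;

    static void encodeFile(final String inPath, final String outPath) throws IOException {
        try (InputStream in = Files.newInputStream(Paths.get(inPath));
             OutputStream out = new Base64OutputStream(Files.newOutputStream(Paths.get(outPath)))) {
            final byte[] buf = new byte[8192];
            int len;
            while ((len = in.read(buf)) > 0) { // same read loop as the processor above
                out.write(buf, 0, len);
            }
        } // closing the Base64OutputStream flushes any remaining padding
    }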

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
index a45c211..0847472 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
@@ -61,340 +61,340 @@ import org.apache.nifi.util.timebuffer.TimedBuffer;
 @CapabilityDescription("Controls the rate at which data is transferred to follow-on processors.")
 public class ControlRate extends AbstractProcessor {
 
-	public static final String DATA_RATE = "data rate";
-	public static final String FLOWFILE_RATE = "flowfile count";
-	public static final String ATTRIBUTE_RATE = "attribute value";
-
-	public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
-		.name("Rate Control Criteria")
-		.description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
-		.required(true)
-		.allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
-		.defaultValue(DATA_RATE)
-		.build();
-	public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
-		.name("Maximum Rate")
-		.description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
-			+ "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
-		.required(true)
-		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
-		.build();
-	public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-		.name("Rate Controlled Attribute")
-		.description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
-			+ "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
-			+ "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
-		.required(false)
-		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-		.expressionLanguageSupported(false)
-		.build();
-	public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
-		.name("Time Duration")
-		.description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
-		.required(true)
-		.addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
-		.defaultValue("1 min")
-		.build();
-	public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-		.name("Grouping Attribute")
-		.description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
-			+ "each value specified by the attribute with this name. Changing this value resets the rate counters.")
-		.required(false)
-		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-		.expressionLanguageSupported(false)
-		.build();
-
-	public static final Relationship REL_SUCCESS = new Relationship.Builder()
-		.name("success")
-		.description("All FlowFiles are transferred to this relationship")
-		.build();
-	public static final Relationship REL_FAILURE = new Relationship.Builder()
-		.name("failure")
-		.description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
-		.build();
-
-	private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
-	private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
-
-	private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
-	private List<PropertyDescriptor> properties;
-	private Set<Relationship> relationships;
-	private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
-
-	@Override
-	protected void init(final ProcessorInitializationContext context) {
-		final List<PropertyDescriptor> properties = new ArrayList<>();
-		properties.add(RATE_CONTROL_CRITERIA);
-		properties.add(MAX_RATE);
-		properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
-		properties.add(TIME_PERIOD);
-		properties.add(GROUPING_ATTRIBUTE_NAME);
-		this.properties = Collections.unmodifiableList(properties);
-
-		final Set<Relationship> relationships = new HashSet<>();
-		relationships.add(REL_SUCCESS);
-		this.relationships = Collections.unmodifiableSet(relationships);
-	}
-
-	@Override
-	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-		return properties;
-	}
-
-	@Override
-	public Set<Relationship> getRelationships() {
-		return relationships;
-	}
-
-	@Override
-	protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-		final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
-
-		final Validator rateValidator;
-		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-			case DATA_RATE:
-				rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
-				break;
-			case ATTRIBUTE_RATE:
-				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-				final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-				if (rateAttr == null) {
-					validationResults.add(new ValidationResult.Builder()
-						.subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
-						.explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
-						.build());
-				}
-				break;
-			case FLOWFILE_RATE:
-			default:
-				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-				break;
-		}
-
-		final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
-		if (!rateResult.isValid()) {
-			validationResults.add(rateResult);
-		}
-
-		return validationResults;
-	}
-
-	@Override
-	public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
-		super.onPropertyModified(descriptor, oldValue, newValue);
-
-		if (descriptor.equals(RATE_CONTROL_CRITERIA)
-			|| descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
-			|| descriptor.equals(GROUPING_ATTRIBUTE_NAME)
-			|| descriptor.equals(TIME_PERIOD)) {
-			// if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
-			throttleMap.clear();
-		} else if (descriptor.equals(MAX_RATE)) {
-			final long newRate;
-			if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
-				newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
-			} else {
-				newRate = Long.parseLong(newValue);
-			}
-
-			for (final Throttle throttle : throttleMap.values()) {
-				throttle.setMaxRate(newRate);
-			}
-		}
-	}
-
-	@Override
-	public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
-		final long lastClearTime = lastThrottleClearTime.get();
-		final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
-		if (lastClearTime < throttleExpirationMillis) {
-			if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
-				final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
-				while (itr.hasNext()) {
-					final Map.Entry<String, Throttle> entry = itr.next();
-					final Throttle throttle = entry.getValue();
-					if (throttle.tryLock()) {
-						try {
-							if (throttle.lastUpdateTime() < lastClearTime) {
-								itr.remove();
-							}
-						} finally {
-							throttle.unlock();
-						}
-					}
-				}
-			}
-		}
-
-		// TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
-		FlowFile flowFile = session.get();
-		if (flowFile == null) {
-			return;
-		}
-
-		final ProcessorLog logger = getLogger();
-		final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
-		final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-		long rateValue;
-		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-			case DATA_RATE:
-				rateValue = flowFile.getSize();
-				break;
-			case FLOWFILE_RATE:
-				rateValue = 1;
-				break;
-			case ATTRIBUTE_RATE:
-				final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
-				if (attributeValue == null) {
-					logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[]{flowFile, rateControlAttributeName});
-					session.transfer(flowFile, REL_FAILURE);
-					return;
-				}
-
-				if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
-					logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
-						new Object[]{flowFile, rateControlAttributeName, attributeValue});
-					session.transfer(flowFile, REL_FAILURE);
-					return;
-				}
-				rateValue = Long.parseLong(attributeValue);
-				break;
-			default:
-				throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
-		}
-
-		final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
-		final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
-		Throttle throttle = throttleMap.get(groupName);
-		if (throttle == null) {
-			throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
-
-			final String maxRateValue = context.getProperty(MAX_RATE).getValue();
-			final long newRate;
-			if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
-				newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
-			} else {
-				newRate = Long.parseLong(maxRateValue);
-			}
-			throttle.setMaxRate(newRate);
-
-			throttleMap.put(groupName, throttle);
-		}
-
-		throttle.lock();
-		try {
-			if (throttle.tryAdd(rateValue)) {
-				logger.info("transferring {} to 'success'", new Object[]{flowFile});
-				session.transfer(flowFile, REL_SUCCESS);
-			} else {
-				flowFile = session.penalize(flowFile);
-				session.transfer(flowFile);
-			}
-		} finally {
-			throttle.unlock();
-		}
-	}
-
-	private static class TimestampedLong {
-
-		private final Long value;
-		private final long timestamp = System.currentTimeMillis();
-
-		public TimestampedLong(final Long value) {
-			this.value = value;
-		}
-
-		public Long getValue() {
-			return value;
-		}
-
-		public long getTimestamp() {
-			return timestamp;
-		}
-	}
-
-	private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
-
-		@Override
-		public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
-			if (oldValue == null && toAdd == null) {
-				return new TimestampedLong(0L);
-			} else if (oldValue == null) {
-				return toAdd;
-			} else if (toAdd == null) {
-				return oldValue;
-			}
-
-			return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
-		}
-
-		@Override
-		public TimestampedLong createNew() {
-			return new TimestampedLong(0L);
-		}
-
-		@Override
-		public long getTimestamp(TimestampedLong entity) {
-			return entity == null ? 0L : entity.getTimestamp();
-		}
-	}
-
-	private static class Throttle extends ReentrantLock {
-
-		private final AtomicLong maxRate = new AtomicLong(1L);
-		private final long timePeriodValue;
-		private final TimeUnit timePeriodUnit;
-		private final TimedBuffer<TimestampedLong> timedBuffer;
-		private final ProcessorLog logger;
-
-		private volatile long penalizationExpired;
-		private volatile long lastUpdateTime;
-
-		public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
-			this.timePeriodUnit = unit;
-			this.timePeriodValue = timePeriod;
-			this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
-			this.logger = logger;
-		}
-
-		public void setMaxRate(final long maxRate) {
-			this.maxRate.set(maxRate);
-		}
-
-		public long lastUpdateTime() {
-			return lastUpdateTime;
-		}
-
-		public boolean tryAdd(final long value) {
-			final long now = System.currentTimeMillis();
-			if (penalizationExpired > now) {
-				return false;
-			}
-
-			final long maxRateValue = maxRate.get();
-
-			final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
-			if (sum != null && sum.getValue() >= maxRateValue) {
-				logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[]{sum.getValue(), value});
-				return false;
-			}
-
-			logger.debug("current sum for throttle is {}, so allowing rate of {} through",
-				new Object[]{sum == null ? 0 : sum.getValue(), value});
-
-			final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
-			if (transferred > maxRateValue) {
-				final long amountOver = transferred - maxRateValue;
-				// determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
-				final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
-				final double pct = (double) amountOver / (double) maxRateValue;
-				final long penalizationPeriod = (long) (milliDuration * pct);
-				this.penalizationExpired = now + penalizationPeriod;
-				logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[]{value, penalizationPeriod});
-			}
-
-			lastUpdateTime = now;
-			return true;
-		}
-	}
+    public static final String DATA_RATE = "data rate";
+    public static final String FLOWFILE_RATE = "flowfile count";
+    public static final String ATTRIBUTE_RATE = "attribute value";
+
+    public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
+        .name("Rate Control Criteria")
+        .description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
+        .required(true)
+        .allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
+        .defaultValue(DATA_RATE)
+        .build();
+    public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
+        .name("Maximum Rate")
+        .description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
+            + "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
+        .required(true)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
+        .build();
+    public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+        .name("Rate Controlled Attribute")
+        .description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
+            + "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
+            + "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
+        .required(false)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+        .expressionLanguageSupported(false)
+        .build();
+    public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
+        .name("Time Duration")
+        .description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
+        .required(true)
+        .addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
+        .defaultValue("1 min")
+        .build();
+    public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+        .name("Grouping Attribute")
+        .description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
+            + "each value specified by the attribute with this name. Changing this value resets the rate counters.")
+        .required(false)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+        .expressionLanguageSupported(false)
+        .build();
+
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+        .name("success")
+        .description("All FlowFiles are transferred to this relationship")
+        .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+        .name("failure")
+        .description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
+        .build();
+
+    private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
+    private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
+
+    private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
+    private List<PropertyDescriptor> properties;
+    private Set<Relationship> relationships;
+    private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        final List<PropertyDescriptor> properties = new ArrayList<>();
+        properties.add(RATE_CONTROL_CRITERIA);
+        properties.add(MAX_RATE);
+        properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
+        properties.add(TIME_PERIOD);
+        properties.add(GROUPING_ATTRIBUTE_NAME);
+        this.properties = Collections.unmodifiableList(properties);
+
+        final Set<Relationship> relationships = new HashSet<>();
+        relationships.add(REL_SUCCESS);
+        this.relationships = Collections.unmodifiableSet(relationships);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return properties;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
+    @Override
+    protected Collection<ValidationResult> customValidate(final ValidationContext context) {
+        final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
+
+        final Validator rateValidator;
+        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+            case DATA_RATE:
+                rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
+                break;
+            case ATTRIBUTE_RATE:
+                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+                final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+                if (rateAttr == null) {
+                    validationResults.add(new ValidationResult.Builder()
+                        .subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
+                        .explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
+                        .build());
+                }
+                break;
+            case FLOWFILE_RATE:
+            default:
+                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+                break;
+        }
+
+        final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
+        if (!rateResult.isValid()) {
+            validationResults.add(rateResult);
+        }
+
+        return validationResults;
+    }
+
+    @Override
+    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
+        super.onPropertyModified(descriptor, oldValue, newValue);
+
+        if (descriptor.equals(RATE_CONTROL_CRITERIA)
+            || descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
+            || descriptor.equals(GROUPING_ATTRIBUTE_NAME)
+            || descriptor.equals(TIME_PERIOD)) {
+            // if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
+            throttleMap.clear();
+        } else if (descriptor.equals(MAX_RATE)) {
+            final long newRate;
+            if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
+                newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
+            } else {
+                newRate = Long.parseLong(newValue);
+            }
+
+            for (final Throttle throttle : throttleMap.values()) {
+                throttle.setMaxRate(newRate);
+            }
+        }
+    }
+
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
+        final long lastClearTime = lastThrottleClearTime.get();
+        final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
+        if (lastClearTime < throttleExpirationMillis) {
+            if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
+                final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
+                while (itr.hasNext()) {
+                    final Map.Entry<String, Throttle> entry = itr.next();
+                    final Throttle throttle = entry.getValue();
+                    if (throttle.tryLock()) {
+                        try {
+                            if (throttle.lastUpdateTime() < lastClearTime) {
+                                itr.remove();
+                            }
+                        } finally {
+                            throttle.unlock();
+                        }
+                    }
+                }
+            }
+        }
+
+        // TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
+        FlowFile flowFile = session.get();
+        if (flowFile == null) {
+            return;
+        }
+
+        final ProcessorLog logger = getLogger();
+        final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
+        final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+        long rateValue;
+        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+            case DATA_RATE:
+                rateValue = flowFile.getSize();
+                break;
+            case FLOWFILE_RATE:
+                rateValue = 1;
+                break;
+            case ATTRIBUTE_RATE:
+                final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
+                if (attributeValue == null) {
+                    logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[] {flowFile, rateControlAttributeName});
+                    session.transfer(flowFile, REL_FAILURE);
+                    return;
+                }
+
+                if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
+                    logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
+                        new Object[] {flowFile, rateControlAttributeName, attributeValue});
+                    session.transfer(flowFile, REL_FAILURE);
+                    return;
+                }
+                rateValue = Long.parseLong(attributeValue);
+                break;
+            default:
+                throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
+        }
+
+        final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
+        final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
+        Throttle throttle = throttleMap.get(groupName);
+        if (throttle == null) {
+            throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
+
+            final String maxRateValue = context.getProperty(MAX_RATE).getValue();
+            final long newRate;
+            if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
+                newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
+            } else {
+                newRate = Long.parseLong(maxRateValue);
+            }
+            throttle.setMaxRate(newRate);
+
+            throttleMap.put(groupName, throttle);
+        }
+
+        throttle.lock();
+        try {
+            if (throttle.tryAdd(rateValue)) {
+                logger.info("transferring {} to 'success'", new Object[] {flowFile});
+                session.transfer(flowFile, REL_SUCCESS);
+            } else {
+                flowFile = session.penalize(flowFile);
+                session.transfer(flowFile);
+            }
+        } finally {
+            throttle.unlock();
+        }
+    }
+
+    private static class TimestampedLong {
+
+        private final Long value;
+        private final long timestamp = System.currentTimeMillis();
+
+        public TimestampedLong(final Long value) {
+            this.value = value;
+        }
+
+        public Long getValue() {
+            return value;
+        }
+
+        public long getTimestamp() {
+            return timestamp;
+        }
+    }
+
+    private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
+
+        @Override
+        public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
+            if (oldValue == null && toAdd == null) {
+                return new TimestampedLong(0L);
+            } else if (oldValue == null) {
+                return toAdd;
+            } else if (toAdd == null) {
+                return oldValue;
+            }
+
+            return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
+        }
+
+        @Override
+        public TimestampedLong createNew() {
+            return new TimestampedLong(0L);
+        }
+
+        @Override
+        public long getTimestamp(TimestampedLong entity) {
+            return entity == null ? 0L : entity.getTimestamp();
+        }
+    }
+
+    private static class Throttle extends ReentrantLock {
+
+        private final AtomicLong maxRate = new AtomicLong(1L);
+        private final long timePeriodValue;
+        private final TimeUnit timePeriodUnit;
+        private final TimedBuffer<TimestampedLong> timedBuffer;
+        private final ProcessorLog logger;
+
+        private volatile long penalizationExpired;
+        private volatile long lastUpdateTime;
+
+        public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
+            this.timePeriodUnit = unit;
+            this.timePeriodValue = timePeriod;
+            this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
+            this.logger = logger;
+        }
+
+        public void setMaxRate(final long maxRate) {
+            this.maxRate.set(maxRate);
+        }
+
+        public long lastUpdateTime() {
+            return lastUpdateTime;
+        }
+
+        public boolean tryAdd(final long value) {
+            final long now = System.currentTimeMillis();
+            if (penalizationExpired > now) {
+                return false;
+            }
+
+            final long maxRateValue = maxRate.get();
+
+            final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
+            if (sum != null && sum.getValue() >= maxRateValue) {
+                logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[] {sum.getValue(), value});
+                return false;
+            }
+
+            logger.debug("current sum for throttle is {}, so allowing rate of {} through",
+                new Object[] {sum == null ? 0 : sum.getValue(), value});
+
+            final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
+            if (transferred > maxRateValue) {
+                final long amountOver = transferred - maxRateValue;
+                // determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
+                final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
+                final double pct = (double) amountOver / (double) maxRateValue;
+                final long penalizationPeriod = (long) (milliDuration * pct);
+                this.penalizationExpired = now + penalizationPeriod;
+                logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[] {value, penalizationPeriod});
+            }
+
+            lastUpdateTime = now;
+            return true;
+        }
+    }
 }
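
The penalization logic in Throttle.tryAdd above admits the FlowFile that crosses the limit but then pauses the throttle in proportion to the overage. A worked example with illustrative numbers: with a 1-minute period and a maximum rate of 1,000,000 bytes, transferring 1,250,000 bytes leaves an overage of 250,000 bytes (25% of the rate), so the throttle is penalized for 25% of the period:

    final long milliDuration = 60_000L;                             // 1 min period, in ms
    final long maxRateValue = 1_000_000L;                           // bytes allowed per period
    final long transferred = 1_250_000L;                            // bytes actually admitted
    final long amountOver = transferred - maxRateValue;             // 250_000
    final double pct = (double) amountOver / (double) maxRateValue; // 0.25
    final long penalizationPeriod = (long) (milliDuration * pct);   // 15_000 ms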


[10/19] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
index b825972..39dc725 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
@@ -28,11 +28,13 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.distributed.cache.client.Deserializer;
 import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
@@ -52,6 +54,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SupportsBatching
 @Tags({"hash", "dupe", "duplicate", "dedupe"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Caches a value, computed from FlowFile attributes, for each incoming FlowFile and determines if the cached value has already been seen. "
         + "If so, routes the FlowFile to 'duplicate' with an attribute named 'original.identifier' that specifies the original FlowFile's"
         + "\"description\", which is specified in the <FlowFile Description> property. If the FlowFile is not determined to be a duplicate, the Processor "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
index afff3c4..73ada84 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
@@ -32,9 +32,11 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.behavior.DynamicRelationship;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -57,6 +59,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @TriggerWhenAnyDestinationAvailable
 @Tags({"distribute", "load balance", "route", "round robin", "weighted"})
 @CapabilityDescription("Distributes FlowFiles to downstream processors based on a Distribution Strategy. If using the Round Robin "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
index 7400821..021a94f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
@@ -21,7 +21,9 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -36,6 +38,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SupportsBatching
 @Tags({"test", "load", "duplicate"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Intended for load testing, this processor will create the configured number of copies of each incoming FlowFile")
 public class DuplicateFlowFile extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
index 67c2214..de81fe5 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
@@ -26,13 +26,20 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Base32InputStream;
 import org.apache.commons.codec.binary.Base32OutputStream;
-
 import org.apache.commons.codec.binary.Base64InputStream;
 import org.apache.commons.codec.binary.Base64OutputStream;
 import org.apache.commons.codec.binary.Hex;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -41,11 +48,6 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processors.standard.util.ValidatingBase32InputStream;
 import org.apache.nifi.processors.standard.util.ValidatingBase64InputStream;
@@ -55,6 +57,7 @@ import org.apache.nifi.util.StopWatch;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"encode", "decode", "base64", "hex"})
 @CapabilityDescription("Encodes the FlowFile content in base64")
 public class EncodeContent extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
index 6492d0a..7b98189 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
@@ -27,6 +27,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -55,6 +57,7 @@ import org.bouncycastle.jce.provider.BouncyCastleProvider;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"encryption", "decryption", "password", "JCE", "OpenPGP", "PGP", "GPG"})
 @CapabilityDescription("Encrypts or Decrypts a FlowFile using either symmetric encryption with a password and randomly generated salt, or asymmetric encryption using a public and secret key.")
 public class EncryptContent extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
index ad3120c..db60f13 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
@@ -16,13 +16,25 @@
  */
 package org.apache.nifi.processors.standard;
 
-import com.jayway.jsonpath.DocumentContext;
-import com.jayway.jsonpath.InvalidJsonException;
-import com.jayway.jsonpath.JsonPath;
-import com.jayway.jsonpath.PathNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -42,24 +54,16 @@ import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
+import com.jayway.jsonpath.DocumentContext;
+import com.jayway.jsonpath.InvalidJsonException;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.PathNotFoundException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"JSON", "evaluate", "JsonPath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Evaluates one or more JsonPath expressions against the content of a FlowFile. "
         + "The results of those expressions are assigned to FlowFile Attributes or are written to the content of the FlowFile itself, "
         + "depending on configuration of the Processor. "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
index 80b1795..6b3c514 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
@@ -49,40 +49,43 @@ import javax.xml.xpath.XPathExpressionException;
 import javax.xml.xpath.XPathFactory;
 import javax.xml.xpath.XPathFactoryConfigurationException;
 
-import net.sf.saxon.lib.NamespaceConstant;
-import net.sf.saxon.xpath.XPathEvaluator;
-
+import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.BufferedInputStream;
-import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
+import org.apache.nifi.stream.io.BufferedInputStream;
+import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 import org.xml.sax.InputSource;
 
+import net.sf.saxon.lib.NamespaceConstant;
+import net.sf.saxon.xpath.XPathEvaluator;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"XML", "evaluate", "XPath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Evaluates one or more XPaths against the content of a FlowFile. The results of those XPaths are assigned to "
         + "FlowFile Attributes or are written to the content of the FlowFile itself, depending on configuration of the "
         + "Processor. XPaths are entered by adding user-defined properties; the name of the property maps to the Attribute "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
index 3291b55..f8db8f8 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
@@ -40,23 +40,15 @@ import javax.xml.transform.TransformerFactoryConfigurationError;
 import javax.xml.transform.sax.SAXSource;
 import javax.xml.transform.stream.StreamResult;
 
-import net.sf.saxon.s9api.DOMDestination;
-import net.sf.saxon.s9api.Processor;
-import net.sf.saxon.s9api.SaxonApiException;
-import net.sf.saxon.s9api.XQueryCompiler;
-import net.sf.saxon.s9api.XQueryEvaluator;
-import net.sf.saxon.s9api.XQueryExecutable;
-import net.sf.saxon.s9api.XdmItem;
-import net.sf.saxon.s9api.XdmNode;
-import net.sf.saxon.s9api.XdmValue;
-
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -78,10 +70,21 @@ import org.apache.nifi.util.ObjectHolder;
 import org.w3c.dom.Document;
 import org.xml.sax.InputSource;
 
+import net.sf.saxon.s9api.DOMDestination;
+import net.sf.saxon.s9api.Processor;
+import net.sf.saxon.s9api.SaxonApiException;
+import net.sf.saxon.s9api.XQueryCompiler;
+import net.sf.saxon.s9api.XQueryEvaluator;
+import net.sf.saxon.s9api.XQueryExecutable;
+import net.sf.saxon.s9api.XdmItem;
+import net.sf.saxon.s9api.XdmNode;
+import net.sf.saxon.s9api.XdmValue;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"XML", "evaluate", "XPath", "XQuery"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription(
         "Evaluates one or more XQueries against the content of a FlowFile.  The results of those XQueries are assigned "
         + "to FlowFile Attributes or are written to the content of the FlowFile itself, depending on configuration of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
index c8a67a0..fd6bb05 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
@@ -43,6 +43,8 @@ import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -60,6 +62,7 @@ import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.ArgumentUtils;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"command", "process", "source", "external", "invoke", "script"})
 @CapabilityDescription("Runs an operating system command specified by the user and writes the output of that command to a FlowFile. If the command is expected "
         + "to be long-running, the Processor can output the partial data on a specified interval. When this option is used, the output is expected to be in textual "

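ExecuteProcess is the first processor in this part of the patch to use INPUT_FORBIDDEN: it generates FlowFiles from an external command rather than consuming them, so an incoming connection would never be read. A minimal sketch of a source processor in the same style (DemoSourceProcessor and REL_SUCCESS are hypothetical, not part of this commit):

    import java.util.Collections;
    import java.util.Set;

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.processor.exception.ProcessException;

    // Hypothetical example, not part of this patch: a source processor never
    // calls session.get(), so it declares INPUT_FORBIDDEN and the framework
    // can reject flows that wire an upstream connection into it.
    @InputRequirement(Requirement.INPUT_FORBIDDEN)
    public class DemoSourceProcessor extends AbstractProcessor {
        static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .build();

        @Override
        public Set<Relationship> getRelationships() {
            return Collections.singleton(REL_SUCCESS);
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            final FlowFile flowFile = session.create(); // no input; create from scratch
            session.transfer(flowFile, REL_SUCCESS);
        }
    }
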
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
index 45fd1a8..5e25bdd 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
@@ -30,6 +30,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -48,6 +50,7 @@ import org.apache.nifi.util.LongHolder;
 import org.apache.nifi.util.StopWatch;
 
 @EventDriven
+@InputRequirement(Requirement.INPUT_ALLOWED)
 @Tags({ "sql", "select", "jdbc", "query", "database" })
 @CapabilityDescription("Execute provided SQL select query. Query result will be converted to Avro format."
     + " Streaming is used so arbitrarily large result sets are supported. This processor can be scheduled to run on " +

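ExecuteSQL is the one processor in this part of the patch annotated INPUT_ALLOWED: per its description it can run on a schedule as a source or be triggered by an incoming FlowFile. The practical consequence, as a sketch (DemoOptionalInputProcessor and its placeholder behavior are hypothetical, not part of this commit):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.exception.ProcessException;

    // Hypothetical example, not part of this patch: with INPUT_ALLOWED the
    // processor is valid both with and without an upstream connection, so a
    // null FlowFile from session.get() is a normal timer-driven run.
    @InputRequirement(Requirement.INPUT_ALLOWED)
    public class DemoOptionalInputProcessor extends AbstractProcessor {
        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            final FlowFile incoming = session.get();
            if (incoming != null) {
                // an incoming FlowFile may parameterize the work to be done
                session.remove(incoming); // placeholder for the sketch
            }
            // ... do the work and emit results either way ...
        }
    }
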
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
index 633ce61..9bea6ba 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
@@ -35,11 +35,13 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -119,6 +121,7 @@ import org.apache.nifi.stream.io.StreamUtils;
  */
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"command execution", "command", "stream", "execute"})
 @CapabilityDescription("Executes an external command on the contents of a flow file, and creates a new flow file with the results of the command.")
 @DynamicProperty(name = "An environment variable name", value = "An environment variable value", description = "These environment variables are passed to the process spawned by this Processor")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
index 29b9c20..9583b8e 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
@@ -34,6 +34,8 @@ import java.util.regex.Pattern;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -56,6 +58,7 @@ import org.apache.nifi.stream.io.StreamUtils;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"evaluate", "extract", "Text", "Regular Expression", "regex"})
 @CapabilityDescription(
         "Evaluates one or more Regular Expressions against the content of a FlowFile.  "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
index aa1206a..4feee1b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
@@ -26,6 +26,12 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.processor.AbstractProcessor;
@@ -34,15 +40,12 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
 @SupportsBatching
 @Tags({"test", "random", "generate"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("This processor creates FlowFiles of random data and is used for load testing")
 public class GenerateFlowFile extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
index ff5b599..7c78faa 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
@@ -20,17 +20,20 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessorInitializationContext;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
 import org.apache.nifi.processors.standard.util.FileTransfer;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"FTP", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
 @CapabilityDescription("Fetches files from an FTP Server and creates FlowFiles from them")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
index 0fa9178..ced79cd 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
@@ -49,12 +49,14 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
@@ -70,6 +72,7 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"local", "files", "filesystem", "ingest", "ingress", "get", "source", "input"})
 @CapabilityDescription("Creates FlowFiles from files in a directory.  NiFi will ignore files it doesn't have at least read permissions for.")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
index 7099552..48ca2de 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
@@ -69,6 +69,8 @@ import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -94,6 +96,7 @@ import org.apache.nifi.ssl.SSLContextService.ClientAuth;
 import org.apache.nifi.util.StopWatch;
 
 @Tags({"get", "fetch", "poll", "http", "https", "ingest", "source", "input"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Fetches a file via HTTP")
 @WritesAttributes({
     @WritesAttribute(attribute = "filename", description = "The filename is set to the name of the file on the remote server"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
index 6be505a..0ba7f98 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
@@ -21,6 +21,8 @@ import java.util.concurrent.LinkedBlockingQueue;
 
 import javax.jms.JMSException;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -34,6 +36,7 @@ import org.apache.nifi.processors.standard.util.JmsFactory;
 import org.apache.nifi.processors.standard.util.WrappedMessageConsumer;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"jms", "queue", "listen", "get", "pull", "source", "consume", "consumer"})
 @CapabilityDescription("Pulls messages from a JMS Queue, creating a FlowFile for each JMS Message or bundle of messages, as configured")
 @SeeAlso(PutJMS.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
index e7209cc..272c7ab 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
@@ -41,6 +41,8 @@ import javax.jms.InvalidDestinationException;
 import javax.jms.JMSException;
 import javax.jms.Session;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -60,6 +62,7 @@ import org.apache.nifi.processors.standard.util.WrappedMessageConsumer;
 
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"jms", "topic", "subscription", "durable", "non-durable", "listen", "get", "pull", "source", "consume", "consumer"})
 @CapabilityDescription("Pulls messages from a JMS Topic, creating a FlowFile for each JMS Message or bundle of messages, as configured")
 @SeeAlso(PutJMS.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
index 7841bec..63256f3 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
@@ -21,11 +21,13 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -34,6 +36,7 @@ import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.FileTransfer;
 import org.apache.nifi.processors.standard.util.SFTPTransfer;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"sftp", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
 @CapabilityDescription("Fetches files from an SFTP Server and creates FlowFiles from them")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
index 2583e88..49bad40 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
@@ -44,11 +44,13 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.AllowableValue;
@@ -75,6 +77,7 @@ import org.eclipse.jetty.util.ssl.SslContextFactory;
 
 import com.sun.jersey.api.client.ClientResponse.Status;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"http", "https", "request", "listen", "ingress", "web service"})
 @CapabilityDescription("Starts an HTTP Server and listens for HTTP Requests. For each request, creates a FlowFile and transfers to 'success'. "
         + "This Processor is designed to be used in conjunction with the HandleHttpResponse Processor in order to create a Web Service")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
index 6de3fe6..a4317dc 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
@@ -27,8 +27,10 @@ import java.util.regex.Pattern;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -41,6 +43,7 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"http", "https", "response", "egress", "web service"})
 @CapabilityDescription("Sends an HTTP Response to the Requestor that generated a FlowFile. This Processor is designed to be used in conjunction with "
         + "the HandleHttpRequest in order to create a web service.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
index b3dbf83..a0c603c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
@@ -33,11 +33,13 @@ import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -96,6 +98,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @SideEffectFree
 @SupportsBatching
 @Tags({"attributes", "hash"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Hashes together the key/value pairs of several FlowFile Attributes and adds the hash as a new attribute. "
         + "Optional properties are to be added such that the name of the property is the name of a FlowFile Attribute to consider "
         + "and the value of the property is a regular expression that, if matched by the attribute value, will cause that attribute "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
index 526754e..9885599 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
@@ -29,10 +29,12 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -50,6 +52,7 @@ import org.apache.nifi.util.ObjectHolder;
 
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hash", "content", "MD5", "SHA-1", "SHA-256"})
 @CapabilityDescription("Calculates a hash value for the Content of a FlowFile and puts that hash value on the FlowFile as an attribute whose name "
         + "is determined by the <Hash Attribute Name> property")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
index 5f16ff3..d09117d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
@@ -24,11 +24,13 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.logging.ProcessorLog;
@@ -65,6 +67,7 @@ import org.apache.tika.mime.MimeTypeException;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"compression", "gzip", "bzip2", "zip", "MIME", "mime.type", "file", "identify"})
 @CapabilityDescription("Attempts to identify the MIME Type used for a FlowFile. If the MIME Type can be identified, "
         + "an attribute with the name 'mime.type' is added with the value being the MIME Type. If the MIME Type cannot be determined, "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
index f16eb9c..a06b3d6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
@@ -54,6 +54,8 @@ import javax.net.ssl.SSLSession;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -77,6 +79,7 @@ import org.joda.time.format.DateTimeFormatter;
 
 @SupportsBatching
 @Tags({"http", "https", "rest", "client"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("An HTTP client processor which converts FlowFile attributes to HTTP headers, with configurable HTTP method, url, etc.")
 @WritesAttributes({
     @WritesAttribute(attribute = "invokehttp.status.code", description = "The status code that is returned"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
index c7842d9..258e122 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
@@ -31,10 +31,14 @@ import java.util.regex.Pattern;
 import javax.servlet.Servlet;
 import javax.ws.rs.Path;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
-import org.apache.nifi.stream.io.StreamThrottler;
 import org.apache.nifi.processor.AbstractSessionFactoryProcessor;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
@@ -42,15 +46,12 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.lifecycle.OnStopped;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.servlets.ContentAcknowledgmentServlet;
 import org.apache.nifi.processors.standard.servlets.ListenHTTPServlet;
 import org.apache.nifi.ssl.SSLContextService;
-
+import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
+import org.apache.nifi.stream.io.StreamThrottler;
 import org.eclipse.jetty.server.Connector;
 import org.eclipse.jetty.server.HttpConfiguration;
 import org.eclipse.jetty.server.HttpConnectionFactory;
@@ -62,6 +63,7 @@ import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.util.thread.QueuedThreadPool;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"ingest", "http", "https", "rest", "listen"})
 @CapabilityDescription("Starts an HTTP Server that is used to receive FlowFiles from remote sources. The URL of the Service will be http://{hostname}:{port}/contentListener")
 public class ListenHTTP extends AbstractSessionFactoryProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
index 6a88bd4..b620dd3 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
@@ -41,6 +41,15 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.annotation.lifecycle.OnStopped;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -58,19 +67,11 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.lifecycle.OnStopped;
-import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.UDPStreamConsumer;
 import org.apache.nifi.util.Tuple;
 
-import org.apache.commons.lang3.StringUtils;
-
 /**
  * <p>
  * This processor listens for Datagram Packets on a given port and concatenates the contents of those packets together generating flow files roughly as often as the internal buffer fills up or until
@@ -113,6 +114,7 @@ import org.apache.commons.lang3.StringUtils;
  */
 @TriggerWhenEmpty
 @Tags({"ingest", "udp", "listen", "source"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Listens for Datagram Packets on a given port and concatenates the contents of those packets "
         + "together generating flow files")
 public class ListenUDP extends AbstractSessionFactoryProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
index 6d0b643..5cd5b14 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
@@ -27,6 +27,14 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -35,22 +43,16 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
 import org.eclipse.jetty.util.StringUtil;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"attributes", "logging"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 public class LogAttribute extends AbstractProcessor {
 
     public static final PropertyDescriptor LOG_LEVEL = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
index e9258df..2cad11e 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
@@ -48,15 +48,17 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
+import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
-import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;
@@ -86,6 +88,7 @@ import org.apache.nifi.util.ObjectHolder;
 
 @SideEffectFree
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"merge", "content", "correlation", "tar", "zip", "stream", "concatenation", "archive", "flowfile-stream", "flowfile-stream-v3"})
 @CapabilityDescription("Merges a Group of FlowFiles together based on a user-defined strategy and packages them into a single FlowFile. "
         + "It is recommended that the Processor be configured with only a single incoming connection, as Group of FlowFiles will not be "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
index be21b32..e0efa3d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
@@ -25,28 +25,32 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.StopWatch;
 
 @EventDriven
 @SideEffectFree
 @Tags({"binary", "discard", "keep"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Keep or discard bytes range from a binary file.")
 public class ModifyBytes extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
index 2900623..426b792 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
@@ -16,6 +16,22 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
@@ -36,23 +52,10 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
 @SideEffectFree
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"monitor", "flow", "active", "inactive", "activity", "detection"})
 @CapabilityDescription("Monitors the flow for activity and sends out an indicator when the flow has not had any data for "
         + "some specified amount of time and again when the flow's activity is restored")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
index 51f28e0..ef84629 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
@@ -82,6 +82,8 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.protocol.HttpContext;
 import org.apache.http.protocol.HttpCoreContext;
 import org.apache.http.util.EntityUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -121,6 +123,7 @@ import org.apache.nifi.util.StopWatch;
 import com.sun.jersey.api.client.ClientResponse.Status;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"http", "https", "remote", "copy", "archive"})
 @CapabilityDescription("Performs an HTTP Post with the content of the FlowFile")
 @ReadsAttribute(attribute = "mime.type", description = "If not sending data as a FlowFile, the mime.type attribute will be used to set the HTTP Header for Content-Type")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
index 7e2dd31..5605b8d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
@@ -47,6 +47,8 @@ import javax.mail.internet.PreencodedMimeBodyPart;
 import javax.mail.util.ByteArrayDataSource;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -67,6 +69,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 
 @SupportsBatching
 @Tags({"email", "put", "notify", "smtp"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Sends an e-mail to configured recipients for each incoming FlowFile")
 public class PutEmail extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
index b959efa..1679982 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
@@ -28,6 +28,8 @@ import java.util.regex.Pattern;
 
 import org.apache.nifi.annotation.behavior.DynamicProperties;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -41,6 +43,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"remote", "copy", "egress", "put", "ftp", "archive", "files"})
 @CapabilityDescription("Sends FlowFiles to an FTP Server")
 @SeeAlso(GetFTP.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
index 3bbe093..8c4b00f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
@@ -34,6 +34,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -52,6 +54,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"put", "local", "copy", "archive", "files", "filesystem"})
 @CapabilityDescription("Writes the contents of a FlowFile to the local file system")
 @SeeAlso(GetFile.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
index 034a3fc..dff5a6b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
@@ -39,9 +39,9 @@ import static org.apache.nifi.processors.standard.util.JmsProperties.MESSAGE_TTL
 import static org.apache.nifi.processors.standard.util.JmsProperties.MESSAGE_TYPE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_BYTE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_EMPTY;
+import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_MAP;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_STREAM;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_TEXT;
-import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_MAP;
 import static org.apache.nifi.processors.standard.util.JmsProperties.PASSWORD;
 import static org.apache.nifi.processors.standard.util.JmsProperties.REPLY_TO_QUEUE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.TIMEOUT;
@@ -70,6 +70,8 @@ import javax.jms.MessageProducer;
 import javax.jms.Session;
 import javax.jms.StreamMessage;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -89,6 +91,7 @@ import org.apache.nifi.processors.standard.util.WrappedMessageProducer;
 import org.apache.nifi.stream.io.StreamUtils;
 
 @Tags({"jms", "send", "put"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Creates a JMS Message from the contents of a FlowFile and sends the message to a JMS Server")
 @SeeAlso({GetJMSQueue.class, GetJMSTopic.class})
 public class PutJMS extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
index 97fe7e5..48cfc26 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -31,6 +33,7 @@ import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.SFTPTransfer;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"remote", "copy", "egress", "put", "sftp", "archive", "files"})
 @CapabilityDescription("Sends FlowFiles to an SFTP Server")
 @SeeAlso(GetSFTP.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
index b087737..0913f86 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
@@ -45,6 +45,8 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
@@ -68,6 +70,7 @@ import org.apache.nifi.stream.io.StreamUtils;
 
 @SupportsBatching
 @SeeAlso(ConvertJSONToSQL.class)
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"sql", "put", "rdbms", "database", "update", "insert", "relational"})
 @CapabilityDescription("Executes a SQL UPDATE or INSERT command. The content of an incoming FlowFile is expected to be the SQL command "
         + "to execute. The SQL command may use the ? to escape parameters. In this case, the parameters to use must exist as FlowFile attributes "


[07/19] nifi git commit: NIFI-810: - Adding basic support for preventing connection when appropriate. - Updating validation when [dis]connecting processors.

Posted by ma...@apache.org.
NIFI-810:
- Adding basic support for preventing connection when appropriate.
- Updating validation when [dis]connecting processors.


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/13edcfda
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/13edcfda
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/13edcfda

Branch: refs/heads/master
Commit: 13edcfda2ef830e1b160b31f7cd3bea874ccd3f0
Parents: 4afd8f8
Author: Matt Gilman <ma...@gmail.com>
Authored: Fri Sep 25 17:46:58 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:26:14 2015 -0400

----------------------------------------------------------------------
 .../org/apache/nifi/web/api/dto/ProcessorDTO.java    | 15 +++++++++++++++
 .../org/apache/nifi/controller/TemplateManager.java  |  1 +
 .../java/org/apache/nifi/web/api/dto/DtoFactory.java |  1 +
 .../src/main/webapp/js/nf/canvas/nf-actions.js       |  4 +++-
 .../src/main/webapp/js/nf/canvas/nf-canvas-utils.js  | 13 ++++++++++---
 .../js/nf/canvas/nf-connection-configuration.js      |  8 ++++++--
 6 files changed, 36 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
index c65c46a..866d77c 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
@@ -36,6 +36,7 @@ public class ProcessorDTO extends NiFiComponentDTO {
     private String description;
     private Boolean supportsParallelProcessing;
     private Boolean supportsEventDriven;
+    private String inputRequirement;
 
     private ProcessorConfigDTO config;
 
@@ -121,6 +122,20 @@ public class ProcessorDTO extends NiFiComponentDTO {
     }
 
     /**
+     * @return the input requirement of this processor
+     */
+    @ApiModelProperty(
+            value = "The input requirement for this processor."
+    )
+    public String getInputRequirement() {
+        return inputRequirement;
+    }
+
+    public void setInputRequirement(String inputRequirement) {
+        this.inputRequirement = inputRequirement;
+    }
+
+    /**
      * @return whether this processor supports event driven scheduling
      */
     @ApiModelProperty(

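The DTO carries the requirement as the enum constant's name rather than the enum itself, so it serializes cleanly for the UI. A small round-trip sketch (the Requirement.valueOf mapping on the consuming side is an assumption; the diff itself only adds the getter/setter pair):

    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.web.api.dto.ProcessorDTO;

    public class InputRequirementDtoSketch {
        public static void main(final String[] args) {
            final ProcessorDTO dto = new ProcessorDTO();
            // Server side: store the enum constant's name, e.g. "INPUT_REQUIRED".
            dto.setInputRequirement(Requirement.INPUT_REQUIRED.name());

            // Consumer side: recover the enum and decide whether the processor
            // may be offered as a connection destination.
            final Requirement requirement = Requirement.valueOf(dto.getInputRequirement());
            System.out.println("valid destination: " + (requirement != Requirement.INPUT_FORBIDDEN));
        }
    }
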
http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
index 7b8e173..a332e05 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
@@ -320,6 +320,7 @@ public class TemplateManager {
 
             // remove validation errors
             processorDTO.setValidationErrors(null);
+            processorDTO.setInputRequirement(null);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
index 76bce6f..16b114e 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
@@ -1402,6 +1402,7 @@ public final class DtoFactory {
         dto.setPosition(createPositionDto(node.getPosition()));
         dto.setStyle(node.getStyle());
         dto.setParentGroupId(node.getProcessGroup().getIdentifier());
+        dto.setInputRequirement(node.getInputRequirement().name());
 
         dto.setType(node.getProcessor().getClass().getCanonicalName());
         dto.setName(node.getName());

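DtoFactory reports whatever the framework node resolved at construction time. The resolution rule is visible later in this series, in the StandardProcessorNode diff: read the class-level annotation and fall back to INPUT_ALLOWED when a processor predates NIFI-810 and carries none. Condensed into a standalone helper:

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

    public final class InputRequirementResolver {
        private InputRequirementResolver() {
        }

        public static Requirement resolve(final Class<?> procClass) {
            final InputRequirement annotation = procClass.getAnnotation(InputRequirement.class);
            // Unannotated (pre-NIFI-810) processors keep the permissive default,
            // so existing flows stay valid after the upgrade.
            return annotation == null ? Requirement.INPUT_ALLOWED : annotation.value();
        }
    }
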
http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
index 3b47a8d..c6ef75f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
@@ -737,7 +737,9 @@ nf.Actions = (function () {
                             var destinationData = destination.datum();
 
                             // update the destination component accordingly
-                            if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                            if (nf.CanvasUtils.isProcessor(destination)) {
+                                nf.Processor.reload(destinationData.component);
+                            } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                                 nf.RemoteProcessGroup.reload(destinationData.component);
                             }
                         } else {

http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
index 9f56e30..1be551f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
@@ -1371,9 +1371,16 @@ nf.CanvasUtils = (function () {
                 return false;
             }
 
-            return nf.CanvasUtils.isProcessor(selection) || nf.CanvasUtils.isProcessGroup(selection) ||
-                    nf.CanvasUtils.isRemoteProcessGroup(selection) || nf.CanvasUtils.isOutputPort(selection) ||
-                    nf.CanvasUtils.isFunnel(selection);
+            if (nf.CanvasUtils.isProcessGroup(selection) || nf.CanvasUtils.isRemoteProcessGroup(selection) ||
+                    nf.CanvasUtils.isOutputPort(selection) || nf.CanvasUtils.isFunnel(selection)) {
+                return true;
+            }
+
+            // if processor, ensure it supports input
+            if (nf.CanvasUtils.isProcessor(selection)) {
+                var destinationData = selection.datum();
+                return destinationData.component.inputRequirement !== 'INPUT_FORBIDDEN';
+            }
         }
     };
 }());
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
index cc246cf..1bafa7d 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
@@ -870,7 +870,9 @@ nf.ConnectionConfiguration = (function () {
                 }
 
                 // update the destination component accordingly
-                if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                if (nf.CanvasUtils.isProcessor(destination)) {
+                    nf.Processor.reload(destinationData.component);
+                } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                     nf.RemoteProcessGroup.reload(destinationData.component);
                 }
 
@@ -958,7 +960,9 @@ nf.ConnectionConfiguration = (function () {
                     }
 
                     // update the destination component accordingly
-                    if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                    if (nf.CanvasUtils.isProcessor(destination)) {
+                        nf.Processor.reload(destinationData.component);
+                    } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                         nf.RemoteProcessGroup.reload(destinationData.component);
                     }
                 }


[19/19] nifi git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/nifi

Posted by ma...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/nifi


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/8a800608
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/8a800608
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/8a800608

Branch: refs/heads/master
Commit: 8a8006085190aae1125c883f2aab16a4ada9beb8
Parents: 0636f0e f2c4f2d
Author: Mark Payne <ma...@hotmail.com>
Authored: Sun Oct 25 11:11:49 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Sun Oct 25 11:11:49 2015 -0400

----------------------------------------------------------------------
 .../hadoop/AbstractHadoopProcessor.java         | 92 +++++++++++++++++---
 1 file changed, 79 insertions(+), 13 deletions(-)
----------------------------------------------------------------------



[08/19] nifi git commit: NIFI-810: Updated the wording on validation errors due to upstream connections

Posted by ma...@apache.org.
NIFI-810: Updated the wording on validation errors due to upstream connections


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/5ecdb185
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/5ecdb185
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/5ecdb185

Branch: refs/heads/master
Commit: 5ecdb1858e94938cd426b12bc48d3725109c6e96
Parents: 13edcfd
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Oct 7 17:26:04 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:26:14 2015 -0400

----------------------------------------------------------------------
 .../nifi/controller/StandardProcessorNode.java  | 2475 +++++++++---------
 1 file changed, 1237 insertions(+), 1238 deletions(-)
----------------------------------------------------------------------

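The visible portion of the hunk below is dominated by a tabs-to-spaces re-indent of the removed lines; the reworded validation message itself lands beyond this excerpt. As a sketch of the shape such a check plausibly takes (the helper name and message wording are assumptions, not the commit's actual text), pairing the processor's Requirement with hasIncomingConnection():

    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.components.ValidationResult;

    public final class UpstreamConnectionCheck {
        private UpstreamConnectionCheck() {
        }

        public static ValidationResult check(final Requirement requirement, final boolean hasIncomingConnection) {
            final boolean satisfied = requirement != Requirement.INPUT_REQUIRED || hasIncomingConnection;
            return new ValidationResult.Builder()
                    .subject("Upstream Connections")
                    .valid(satisfied)
                    .explanation(satisfied ? "" : "Processor requires an upstream connection but currently has none")
                    .build();
        }
    }
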

http://git-wip-us.apache.org/repos/asf/nifi/blob/5ecdb185/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index 0c39eda..3c816d0 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -74,1242 +74,1241 @@ import org.slf4j.LoggerFactory;
  */
 public class StandardProcessorNode extends ProcessorNode implements Connectable {
 
-	public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
-
-	public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
-	public static final String DEFAULT_YIELD_PERIOD = "1 sec";
-	public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
-	private final AtomicReference<ProcessGroup> processGroup;
-	private final Processor processor;
-	private final AtomicReference<String> identifier;
-	private final Map<Connection, Connectable> destinations;
-	private final Map<Relationship, Set<Connection>> connections;
-	private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
-	private final AtomicReference<List<Connection>> incomingConnectionsRef;
-	private final ReentrantReadWriteLock rwLock;
-	private final Lock readLock;
-	private final Lock writeLock;
-	private final AtomicBoolean isolated;
-	private final AtomicBoolean lossTolerant;
-	private final AtomicReference<ScheduledState> scheduledState;
-	private final AtomicReference<String> comments;
-	private final AtomicReference<String> name;
-	private final AtomicReference<Position> position;
-	private final AtomicReference<String> annotationData;
-	private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
-	private final AtomicReference<String> yieldPeriod;
-	private final AtomicReference<String> penalizationPeriod;
-	private final AtomicReference<Map<String, String>> style;
-	private final AtomicInteger concurrentTaskCount;
-	private final AtomicLong yieldExpiration;
-	private final AtomicLong schedulingNanos;
-	private final boolean triggerWhenEmpty;
-	private final boolean sideEffectFree;
-	private final boolean triggeredSerially;
-	private final boolean triggerWhenAnyDestinationAvailable;
-	private final boolean eventDrivenSupported;
-	private final boolean batchSupported;
-	private final Requirement inputRequirement;
-	private final ValidationContextFactory validationContextFactory;
-	private final ProcessScheduler processScheduler;
-	private long runNanos = 0L;
-
-	private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
-
-	@SuppressWarnings("deprecation")
-	public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
-		final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
-		super(processor, uuid, validationContextFactory, controllerServiceProvider);
-
-		this.processor = processor;
-		identifier = new AtomicReference<>(uuid);
-		destinations = new HashMap<>();
-		connections = new HashMap<>();
-		incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
-		scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
-		rwLock = new ReentrantReadWriteLock(false);
-		readLock = rwLock.readLock();
-		writeLock = rwLock.writeLock();
-		lossTolerant = new AtomicBoolean(false);
-		final Set<Relationship> emptySetOfRelationships = new HashSet<>();
-		undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
-		comments = new AtomicReference<>("");
-		name = new AtomicReference<>(processor.getClass().getSimpleName());
-		schedulingPeriod = new AtomicReference<>("0 sec");
-		schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
-		yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
-		yieldExpiration = new AtomicLong(0L);
-		concurrentTaskCount = new AtomicInteger(1);
-		position = new AtomicReference<>(new Position(0D, 0D));
-		style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
-		this.processGroup = new AtomicReference<>();
-		processScheduler = scheduler;
-		annotationData = new AtomicReference<>();
-		isolated = new AtomicBoolean(false);
-		penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
-
-		final Class<?> procClass = processor.getClass();
-		triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
-		sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
-		batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
-		triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
-		triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
-			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
-		this.validationContextFactory = validationContextFactory;
-		eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
-			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
-
-		final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
-		if (inputRequirementPresent) {
-			inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
-		} else {
-			inputRequirement = Requirement.INPUT_ALLOWED;
-		}
-
-		schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
-	}
-
-	/**
-	 * @return comments about this specific processor instance
-	 */
-	@Override
-	public String getComments() {
-		return comments.get();
-	}
-
-	/**
-	 * Provides an opportunity to retain information about this particular processor instance
-	 *
-	 * @param comments new comments
-	 */
-	@Override
-	public void setComments(final String comments) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			this.comments.set(comments);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public ScheduledState getScheduledState() {
-		return scheduledState.get();
-	}
-
-	@Override
-	public Position getPosition() {
-		return position.get();
-	}
-
-	@Override
-	public void setPosition(Position position) {
-		this.position.set(position);
-	}
-
-	@Override
-	public Map<String, String> getStyle() {
-		return style.get();
-	}
-
-	@Override
-	public void setStyle(final Map<String, String> style) {
-		if (style != null) {
-			this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
-		}
-	}
-
-	@Override
-	public String getIdentifier() {
-		return identifier.get();
-	}
-
-	/**
-	 * @return if true flow file content generated by this processor is considered loss tolerant
-	 */
-	@Override
-	public boolean isLossTolerant() {
-		return lossTolerant.get();
-	}
-
-	@Override
-	public boolean isIsolated() {
-		return isolated.get();
-	}
-
-	/**
-	 * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
-	 */
-	@Override
-	public boolean isTriggerWhenEmpty() {
-		return triggerWhenEmpty;
-	}
-
-	/**
-	 * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
-	 */
-	@Override
-	public boolean isSideEffectFree() {
-		return sideEffectFree;
-	}
-
-	@Override
-	public boolean isHighThroughputSupported() {
-		return batchSupported;
-	}
-
-	/**
-	 * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
-	 */
-	@Override
-	public boolean isTriggerWhenAnyDestinationAvailable() {
-		return triggerWhenAnyDestinationAvailable;
-	}
-
-	/**
-	 * Indicates whether flow file content made by this processor must be persisted
-	 *
-	 * @param lossTolerant tolerant
-	 */
-	@Override
-	public void setLossTolerant(final boolean lossTolerant) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			this.lossTolerant.set(lossTolerant);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * Indicates whether the processor runs on only the primary node.
-	 *
-	 * @param isolated isolated
-	 */
-	public void setIsolated(final boolean isolated) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			this.isolated.set(isolated);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean isAutoTerminated(final Relationship relationship) {
-		final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
-		if (terminatable == null) {
-			return false;
-		}
-		return terminatable.contains(relationship);
-	}
-
-	@Override
-	public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-
-			for (final Relationship rel : terminate) {
-				if (!getConnections(rel).isEmpty()) {
-					throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
-				}
-			}
-			undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
-	 */
-	@Override
-	public Set<Relationship> getAutoTerminatedRelationships() {
-		Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
-		if (relationships == null) {
-			relationships = new HashSet<>();
-		}
-		return Collections.unmodifiableSet(relationships);
-	}
-
-	@Override
-	public String getName() {
-		return name.get();
-	}
-
-	/**
-	 * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
-	 */
-	@SuppressWarnings("deprecation")
-	public String getProcessorDescription() {
-		CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
-		String description = null;
-		if (capDesc != null) {
-			description = capDesc.value();
-		} else {
-			final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
-			= processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
-			if (deprecatedCapDesc != null) {
-				description = deprecatedCapDesc.value();
-			}
-		}
-
-		return description;
-	}
-
-	@Override
-	public void setName(final String name) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			this.name.set(name);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * @param timeUnit determines the unit of time in which to report the scheduling period; if null, it is reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
-	 * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
-	 */
-	@Override
-	public long getSchedulingPeriod(final TimeUnit timeUnit) {
-		return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
-	}
-
-	@Override
-	public boolean isEventDrivenSupported() {
-		readLock.lock();
-		try {
-			return this.eventDrivenSupported;
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	/**
-	 * Updates the Scheduling Strategy used for this Processor
-	 *
-	 * @param schedulingStrategy strategy
-	 *
-	 * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
-	 */
-	@Override
-	public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
-		writeLock.lock();
-		try {
-			if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
-				// not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
-				// it no longer supports EventDriven mode, we don't want the app to fail to startup if it was already in Event-Driven
-				// Mode. Instead, we will simply leave it in Timer-Driven mode
-				return;
-			}
-
-			this.schedulingStrategy = schedulingStrategy;
-			setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * @return the currently configured scheduling strategy
-	 */
-	@Override
-	public SchedulingStrategy getSchedulingStrategy() {
-		readLock.lock();
-		try {
-			return this.schedulingStrategy;
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public String getSchedulingPeriod() {
-		return schedulingPeriod.get();
-	}
-
-	@Override
-	public void setScheduldingPeriod(final String schedulingPeriod) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-
-			switch (schedulingStrategy) {
-				case CRON_DRIVEN: {
-					try {
-						new CronExpression(schedulingPeriod);
-					} catch (final Exception e) {
-						throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
-					}
-				}
-				break;
-				case PRIMARY_NODE_ONLY:
-				case TIMER_DRIVEN: {
-					final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
-					if (schedulingNanos < 0) {
-						throw new IllegalArgumentException("Scheduling Period must be positive");
-					}
-					this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
-				}
-				break;
-				case EVENT_DRIVEN:
-				default:
-					return;
-			}
-
-			this.schedulingPeriod.set(schedulingPeriod);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public long getRunDuration(final TimeUnit timeUnit) {
-		readLock.lock();
-		try {
-			return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void setRunDuration(final long duration, final TimeUnit timeUnit) {
-		writeLock.lock();
-		try {
-			if (duration < 0) {
-				throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
-			}
-
-			this.runNanos = timeUnit.toNanos(duration);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public long getYieldPeriod(final TimeUnit timeUnit) {
-		return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-	}
-
-	@Override
-	public String getYieldPeriod() {
-		return yieldPeriod.get();
-	}
-
-	@Override
-	public void setYieldPeriod(final String yieldPeriod) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
-			if (yieldMillis < 0) {
-				throw new IllegalArgumentException("Yield duration must be positive");
-			}
-			this.yieldPeriod.set(yieldPeriod);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
-	 * methods.
-	 */
-	@Override
-	public void yield() {
-		final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
-		yield(yieldMillis, TimeUnit.MILLISECONDS);
-
-		final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
-		LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
-	}
-
-	@Override
-	public void yield(final long period, final TimeUnit timeUnit) {
-		final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
-		yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
-
-		processScheduler.yield(this);
-	}
-
-	/**
-	 * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
-	 */
-	@Override
-	public long getYieldExpiration() {
-		return yieldExpiration.get();
-	}
-
-	@Override
-	public long getPenalizationPeriod(final TimeUnit timeUnit) {
-		return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-	}
-
-	@Override
-	public String getPenalizationPeriod() {
-		return penalizationPeriod.get();
-	}
-
-	@Override
-	public void setPenalizationPeriod(final String penalizationPeriod) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
-			if (penalizationMillis < 0) {
-				throw new IllegalArgumentException("Penalization duration must be positive");
-			}
-			this.penalizationPeriod.set(penalizationPeriod);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * Determines the number of concurrent tasks that may be running for this processor.
-	 *
-	 * @param taskCount a number of concurrent tasks this processor may have running
-	 * @throws IllegalArgumentException if the given value is less than 1
-	 */
-	@Override
-	public void setMaxConcurrentTasks(final int taskCount) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
-				throw new IllegalArgumentException();
-			}
-			if (!triggeredSerially) {
-				concurrentTaskCount.set(taskCount);
-			}
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean isTriggeredSerially() {
-		return triggeredSerially;
-	}
-
-	/**
-	 * @return the number of tasks that may execute concurrently for this processor
-	 */
-	@Override
-	public int getMaxConcurrentTasks() {
-		return concurrentTaskCount.get();
-	}
-
-	@Override
-	public LogLevel getBulletinLevel() {
-		return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
-	}
-
-	@Override
-	public void setBulletinLevel(final LogLevel level) {
-		LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
-	}
-
-	@Override
-	public Set<Connection> getConnections() {
-		final Set<Connection> allConnections = new HashSet<>();
-		readLock.lock();
-		try {
-			for (final Set<Connection> connectionSet : connections.values()) {
-				allConnections.addAll(connectionSet);
-			}
-		} finally {
-			readLock.unlock();
-		}
-
-		return allConnections;
-	}
-
-	@Override
-	public List<Connection> getIncomingConnections() {
-		return incomingConnectionsRef.get();
-	}
-
-	@Override
-	public Set<Connection> getConnections(final Relationship relationship) {
-		final Set<Connection> applicableConnections;
-		readLock.lock();
-		try {
-			applicableConnections = connections.get(relationship);
-		} finally {
-			readLock.unlock();
-		}
-		return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
-	}
-
-	@Override
-	public void addConnection(final Connection connection) {
-		Objects.requireNonNull(connection, "connection cannot be null");
-
-		if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
-			throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
-		}
-
-		writeLock.lock();
-		try {
-			List<Connection> updatedIncoming = null;
-			if (connection.getDestination().equals(this)) {
-				// don't add the connection twice. This may occur if we have a self-loop because we will be told
-				// to add the connection once because we are the source and again because we are the destination.
-				final List<Connection> incomingConnections = incomingConnectionsRef.get();
-				updatedIncoming = new ArrayList<>(incomingConnections);
-				if (!updatedIncoming.contains(connection)) {
-					updatedIncoming.add(connection);
-				}
-			}
-
-			if (connection.getSource().equals(this)) {
-				// don't add the connection twice. This may occur if we have a self-loop because we will be told
-				// to add the connection once because we are the source and again because we are the destination.
-				if (!destinations.containsKey(connection)) {
-					for (final Relationship relationship : connection.getRelationships()) {
-						final Relationship rel = getRelationship(relationship.getName());
-						Set<Connection> set = connections.get(rel);
-						if (set == null) {
-							set = new HashSet<>();
-							connections.put(rel, set);
-						}
-
-						set.add(connection);
-
-						destinations.put(connection, connection.getDestination());
-					}
-
-					final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-					if (autoTerminated != null) {
-						autoTerminated.removeAll(connection.getRelationships());
-						this.undefinedRelationshipsToTerminate.set(autoTerminated);
-					}
-				}
-			}
-
-			if (updatedIncoming != null) {
-				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-			}
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean hasIncomingConnection() {
-		return !incomingConnectionsRef.get().isEmpty();
-	}
-
-	@Override
-	public void updateConnection(final Connection connection) throws IllegalStateException {
-		if (requireNonNull(connection).getSource().equals(this)) {
-			writeLock.lock();
-			try {
-				//
-				// update any relationships
-				//
-				// first check if any relations were removed.
-				final List<Relationship> existingRelationships = new ArrayList<>();
-				for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
-					if (entry.getValue().contains(connection)) {
-						existingRelationships.add(entry.getKey());
-					}
-				}
-
-				for (final Relationship rel : connection.getRelationships()) {
-					if (!existingRelationships.contains(rel)) {
-						// relationship was removed. Check if this is legal.
-						final Set<Connection> connectionsForRelationship = getConnections(rel);
-						if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
-							// if we are running and we do not terminate undefined relationships and this is the only
-							// connection that defines the given relationship, and that relationship is required,
-							// then it is not legal to remove this relationship from this connection.
-							throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
-								+ this + ", which is currently running");
-						}
-					}
-				}
-
-				// remove the connection from any list that currently contains
-				for (final Set<Connection> list : connections.values()) {
-					list.remove(connection);
-				}
-
-				// add the connection in for all relationships listed.
-				for (final Relationship rel : connection.getRelationships()) {
-					Set<Connection> set = connections.get(rel);
-					if (set == null) {
-						set = new HashSet<>();
-						connections.put(rel, set);
-					}
-					set.add(connection);
-				}
-
-				// update to the new destination
-				destinations.put(connection, connection.getDestination());
-
-				final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-				if (autoTerminated != null) {
-					autoTerminated.removeAll(connection.getRelationships());
-					this.undefinedRelationshipsToTerminate.set(autoTerminated);
-				}
-			} finally {
-				writeLock.unlock();
-			}
-		}
-
-		if (connection.getDestination().equals(this)) {
-			writeLock.lock();
-			try {
-				// update our incoming connections -- we can just remove & re-add the connection to
-				// update the list.
-				final List<Connection> incomingConnections = incomingConnectionsRef.get();
-				final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-				updatedIncoming.remove(connection);
-				updatedIncoming.add(connection);
-				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-			} finally {
-				writeLock.unlock();
-			}
-		}
-	}
-
-	@Override
-	public void removeConnection(final Connection connection) {
-		boolean connectionRemoved = false;
-
-		if (requireNonNull(connection).getSource().equals(this)) {
-			for (final Relationship relationship : connection.getRelationships()) {
-				final Set<Connection> connectionsForRelationship = getConnections(relationship);
-				if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
-					throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
-				}
-			}
-
-			writeLock.lock();
-			try {
-				for (final Set<Connection> connectionList : this.connections.values()) {
-					connectionList.remove(connection);
-				}
-
-				connectionRemoved = (destinations.remove(connection) != null);
-			} finally {
-				writeLock.unlock();
-			}
-		}
-
-		if (connection.getDestination().equals(this)) {
-			writeLock.lock();
-			try {
-				final List<Connection> incomingConnections = incomingConnectionsRef.get();
-				if (incomingConnections.contains(connection)) {
-					final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-					updatedIncoming.remove(connection);
-					incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-					return;
-				}
-			} finally {
-				writeLock.unlock();
-			}
-		}
-
-		if (!connectionRemoved) {
-			throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
-		}
-	}
-
-	/**
-	 * @param relationshipName name
-	 * @return the relationship for this nodes processor for the given name or creates a new relationship for the given name
-	 */
-	@Override
-	public Relationship getRelationship(final String relationshipName) {
-		final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
-		Relationship returnRel = specRel;
-
-		final Set<Relationship> relationships;
-		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-			relationships = processor.getRelationships();
-		}
-
-		for (final Relationship rel : relationships) {
-			if (rel.equals(specRel)) {
-				returnRel = rel;
-				break;
-			}
-		}
-		return returnRel;
-	}
-
-	@Override
-	public Processor getProcessor() {
-		return this.processor;
-	}
-
-	/**
-	 * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
-	 */
-	public Set<Connectable> getDestinations() {
-		final Set<Connectable> nonSelfDestinations = new HashSet<>();
-		readLock.lock();
-		try {
-			for (final Connectable connectable : destinations.values()) {
-				if (connectable != this) {
-					nonSelfDestinations.add(connectable);
-				}
-			}
-		} finally {
-			readLock.unlock();
-		}
-		return nonSelfDestinations;
-	}
-
-	public Set<Connectable> getDestinations(final Relationship relationship) {
-		readLock.lock();
-		try {
-			final Set<Connectable> destinationSet = new HashSet<>();
-			final Set<Connection> relationshipConnections = connections.get(relationship);
-			if (relationshipConnections != null) {
-				for (final Connection connection : relationshipConnections) {
-					destinationSet.add(destinations.get(connection));
-				}
-			}
-			return destinationSet;
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	public Set<Relationship> getUndefinedRelationships() {
-		final Set<Relationship> undefined = new HashSet<>();
-		readLock.lock();
-		try {
-			final Set<Relationship> relationships;
-			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-				relationships = processor.getRelationships();
-			}
-
-			if (relationships == null) {
-				return undefined;
-			}
-			for (final Relationship relation : relationships) {
-				final Set<Connection> connectionSet = this.connections.get(relation);
-				if (connectionSet == null || connectionSet.isEmpty()) {
-					undefined.add(relation);
-				}
-			}
-		} finally {
-			readLock.unlock();
-		}
-		return undefined;
-	}
-
-	/**
-	 * Determines if the given node is a destination for this node
-	 *
-	 * @param node node
-	 * @return true if is a direct destination node; false otherwise
-	 */
-	boolean isRelated(final ProcessorNode node) {
-		readLock.lock();
-		try {
-			return this.destinations.containsValue(node);
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean isRunning() {
-		readLock.lock();
-		try {
-			return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public int getActiveThreadCount() {
-		readLock.lock();
-		try {
-			return processScheduler.getActiveThreadCount(this);
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean isValid() {
-		readLock.lock();
-		try {
-			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-			final Collection<ValidationResult> validationResults;
-			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-				validationResults = getProcessor().validate(validationContext);
-			}
-
-			for (final ValidationResult result : validationResults) {
-				if (!result.isValid()) {
-					return false;
-				}
-			}
-
-			for (final Relationship undef : getUndefinedRelationships()) {
-				if (!isAutoTerminated(undef)) {
-					return false;
-				}
-			}
-
-			switch (getInputRequirement()) {
-				case INPUT_ALLOWED:
-					break;
-				case INPUT_FORBIDDEN: {
-					if (!getIncomingConnections().isEmpty()) {
-						return false;
-					}
-					break;
-				}
-				case INPUT_REQUIRED: {
-					if (getIncomingConnections().isEmpty()) {
-						return false;
-					}
-					break;
-				}
-			}
-		} catch (final Throwable t) {
-			return false;
-		} finally {
-			readLock.unlock();
-		}
-
-		return true;
-	}
-
-	@Override
-	public Collection<ValidationResult> getValidationErrors() {
-		final List<ValidationResult> results = new ArrayList<>();
-		readLock.lock();
-		try {
-			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-			final Collection<ValidationResult> validationResults;
-			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-				validationResults = getProcessor().validate(validationContext);
-			}
-
-			for (final ValidationResult result : validationResults) {
-				if (!result.isValid()) {
-					results.add(result);
-				}
-			}
-
-			for (final Relationship relationship : getUndefinedRelationships()) {
-				if (!isAutoTerminated(relationship)) {
-					final ValidationResult error = new ValidationResult.Builder()
-						.explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
-						.subject("Relationship " + relationship.getName())
-						.valid(false)
-						.build();
-					results.add(error);
-				}
-			}
-
-			switch (getInputRequirement()) {
-				case INPUT_ALLOWED:
-					break;
-				case INPUT_FORBIDDEN: {
-					final int incomingConnCount = getIncomingConnections().size();
-					if (incomingConnCount != 0) {
-						results.add(new ValidationResult.Builder()
-							.explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
-							.subject("Incoming Connections")
-							.valid(false)
-							.build());
-					}
-					break;
-				}
-				case INPUT_REQUIRED: {
-					if (getIncomingConnections().isEmpty()) {
-						results.add(new ValidationResult.Builder()
-							.explanation("Processor required at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
-							.subject("Incoming Connections")
-							.valid(false)
-							.build());
-					}
-					break;
-				}
-			}
-		} catch (final Throwable t) {
-			results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
-		} finally {
-			readLock.unlock();
-		}
-		return results;
-	}
-
-	@Override
-	public Requirement getInputRequirement() {
-		return inputRequirement;
-	}
-
-	/**
-	 * Establishes node equality (based on the processor's identifier)
-	 *
-	 * @param other node
-	 * @return true if equal
-	 */
-	@Override
-	public boolean equals(final Object other) {
-		if (!(other instanceof ProcessorNode)) {
-			return false;
-		}
-		final ProcessorNode on = (ProcessorNode) other;
-		return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
-	}
-
-	@Override
-	public int hashCode() {
-		return new HashCodeBuilder(7, 67).append(identifier).toHashCode();
-	}
-
-	@Override
-	public Collection<Relationship> getRelationships() {
-		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-			return getProcessor().getRelationships();
-		}
-	}
-
-	@Override
-	public String toString() {
-		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-			return getProcessor().toString();
-		}
-	}
-
-	@Override
-	public ProcessGroup getProcessGroup() {
-		return processGroup.get();
-	}
-
-	@Override
-	public void setProcessGroup(final ProcessGroup group) {
-		writeLock.lock();
-		try {
-			this.processGroup.set(group);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
-		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-			processor.onTrigger(context, sessionFactory);
-		}
-	}
-
-	@Override
-	public ConnectableType getConnectableType() {
-		return ConnectableType.PROCESSOR;
-	}
-
-	@Override
-	public void setScheduledState(final ScheduledState scheduledState) {
-		this.scheduledState.set(scheduledState);
-		if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
-			yieldExpiration.set(0L);
-		}
-	}
-
-	@Override
-	public void setAnnotationData(final String data) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot set AnnotationData while processor is running");
-			}
-
-			this.annotationData.set(data);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public String getAnnotationData() {
-		return annotationData.get();
-	}
-
-	@Override
-	public Collection<ValidationResult> validate(final ValidationContext validationContext) {
-		return getValidationErrors();
-	}
-
-	@Override
-	public void verifyCanDelete() throws IllegalStateException {
-		verifyCanDelete(false);
-	}
-
-	@Override
-	public void verifyCanDelete(final boolean ignoreConnections) {
-		readLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException(this + " is running");
-			}
-
-			if (!ignoreConnections) {
-				for (final Set<Connection> connectionSet : connections.values()) {
-					for (final Connection connection : connectionSet) {
-						connection.verifyCanDelete();
-					}
-				}
-
-				for (final Connection connection : incomingConnectionsRef.get()) {
-					if (connection.getSource().equals(this)) {
-						connection.verifyCanDelete();
-					} else {
-						throw new IllegalStateException(this + " is the destination of another component");
-					}
-				}
-			}
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void verifyCanStart() {
-		readLock.lock();
-		try {
-			switch (getScheduledState()) {
-				case DISABLED:
-					throw new IllegalStateException(this + " cannot be started because it is disabled");
-				case RUNNING:
-					throw new IllegalStateException(this + " cannot be started because it is already running");
-				case STOPPED:
-					break;
-			}
-			verifyNoActiveThreads();
-
-			if (!isValid()) {
-				throw new IllegalStateException(this + " is not in a valid state");
-			}
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
-		switch (getScheduledState()) {
-			case DISABLED:
-				throw new IllegalStateException(this + " cannot be started because it is disabled");
-			case RUNNING:
-				throw new IllegalStateException(this + " cannot be started because it is already running");
-			case STOPPED:
-				break;
-		}
-		verifyNoActiveThreads();
-
-		final Set<String> ids = new HashSet<>();
-		for (final ControllerServiceNode node : ignoredReferences) {
-			ids.add(node.getIdentifier());
-		}
-
-		final Collection<ValidationResult> validationResults = getValidationErrors(ids);
-		for (final ValidationResult result : validationResults) {
-			if (!result.isValid()) {
-				throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
-			}
-		}
-	}
-
-	@Override
-	public void verifyCanStop() {
-		if (getScheduledState() != ScheduledState.RUNNING) {
-			throw new IllegalStateException(this + " is not scheduled to run");
-		}
-	}
-
-	@Override
-	public void verifyCanUpdate() {
-		readLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException(this + " is not stopped");
-			}
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void verifyCanEnable() {
-		readLock.lock();
-		try {
-			if (getScheduledState() != ScheduledState.DISABLED) {
-				throw new IllegalStateException(this + " is not disabled");
-			}
-
-			verifyNoActiveThreads();
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void verifyCanDisable() {
-		readLock.lock();
-		try {
-			if (getScheduledState() != ScheduledState.STOPPED) {
-				throw new IllegalStateException(this + " is not stopped");
-			}
-			verifyNoActiveThreads();
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	private void verifyNoActiveThreads() throws IllegalStateException {
-		final int threadCount = processScheduler.getActiveThreadCount(this);
-		if (threadCount > 0) {
-			throw new IllegalStateException(this + " has " + threadCount + " threads still active");
-		}
-	}
-
-	@Override
-	public void verifyModifiable() throws IllegalStateException {
-		if (isRunning()) {
-			throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-		}
-	}
+    public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
+
+    public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
+    public static final String DEFAULT_YIELD_PERIOD = "1 sec";
+    public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
+    private final AtomicReference<ProcessGroup> processGroup;
+    private final Processor processor;
+    private final AtomicReference<String> identifier;
+    private final Map<Connection, Connectable> destinations;
+    private final Map<Relationship, Set<Connection>> connections;
+    private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
+    private final AtomicReference<List<Connection>> incomingConnectionsRef;
+    private final ReentrantReadWriteLock rwLock;
+    private final Lock readLock;
+    private final Lock writeLock;
+    private final AtomicBoolean isolated;
+    private final AtomicBoolean lossTolerant;
+    private final AtomicReference<ScheduledState> scheduledState;
+    private final AtomicReference<String> comments;
+    private final AtomicReference<String> name;
+    private final AtomicReference<Position> position;
+    private final AtomicReference<String> annotationData;
+    private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
+    private final AtomicReference<String> yieldPeriod;
+    private final AtomicReference<String> penalizationPeriod;
+    private final AtomicReference<Map<String, String>> style;
+    private final AtomicInteger concurrentTaskCount;
+    private final AtomicLong yieldExpiration;
+    private final AtomicLong schedulingNanos;
+    private final boolean triggerWhenEmpty;
+    private final boolean sideEffectFree;
+    private final boolean triggeredSerially;
+    private final boolean triggerWhenAnyDestinationAvailable;
+    private final boolean eventDrivenSupported;
+    private final boolean batchSupported;
+    private final Requirement inputRequirement;
+    private final ValidationContextFactory validationContextFactory;
+    private final ProcessScheduler processScheduler;
+    private long runNanos = 0L;
+
+    private SchedulingStrategy schedulingStrategy; // guarded by read/write lock
+
+    @SuppressWarnings("deprecation")
+    public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
+        final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
+        super(processor, uuid, validationContextFactory, controllerServiceProvider);
+
+        this.processor = processor;
+        identifier = new AtomicReference<>(uuid);
+        destinations = new HashMap<>();
+        connections = new HashMap<>();
+        incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
+        scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
+        rwLock = new ReentrantReadWriteLock(false);
+        readLock = rwLock.readLock();
+        writeLock = rwLock.writeLock();
+        lossTolerant = new AtomicBoolean(false);
+        final Set<Relationship> emptySetOfRelationships = new HashSet<>();
+        undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
+        comments = new AtomicReference<>("");
+        name = new AtomicReference<>(processor.getClass().getSimpleName());
+        schedulingPeriod = new AtomicReference<>("0 sec");
+        schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
+        yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
+        yieldExpiration = new AtomicLong(0L);
+        concurrentTaskCount = new AtomicInteger(1);
+        position = new AtomicReference<>(new Position(0D, 0D));
+        style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
+        this.processGroup = new AtomicReference<>();
+        processScheduler = scheduler;
+        annotationData = new AtomicReference<>();
+        isolated = new AtomicBoolean(false);
+        penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
+
+        final Class<?> procClass = processor.getClass();
+        triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
+        sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
+        batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
+        triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
+        triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
+            || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
+        this.validationContextFactory = validationContextFactory;
+        eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
+            || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
+
+        final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
+        if (inputRequirementPresent) {
+            inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
+        } else {
+            inputRequirement = Requirement.INPUT_ALLOWED;
+        }
+
+        schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
+    }
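
For reference, here is a minimal, self-contained sketch of the annotation-driven defaulting the constructor performs for InputRequirement. The types below are illustrative stand-ins, not the NiFi classes; only the reflection pattern is taken from the code above.

    import java.lang.annotation.ElementType;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.annotation.Target;

    public class InputRequirementSketch {
        enum Requirement { INPUT_ALLOWED, INPUT_REQUIRED, INPUT_FORBIDDEN }

        @Retention(RetentionPolicy.RUNTIME)
        @Target(ElementType.TYPE)
        @interface InputRequirement {
            Requirement value();
        }

        @InputRequirement(Requirement.INPUT_REQUIRED)
        static class SampleProcessor { }

        // Mirrors the constructor: use the annotation when present, otherwise
        // default to INPUT_ALLOWED so legacy processors keep their old behavior.
        static Requirement resolve(final Class<?> procClass) {
            final InputRequirement annotation = procClass.getAnnotation(InputRequirement.class);
            return (annotation == null) ? Requirement.INPUT_ALLOWED : annotation.value();
        }

        public static void main(final String[] args) {
            System.out.println(resolve(SampleProcessor.class)); // INPUT_REQUIRED
            System.out.println(resolve(Object.class));          // INPUT_ALLOWED
        }
    }
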
+
+    /**
+     * @return comments about this specific processor instance
+     */
+    @Override
+    public String getComments() {
+        return comments.get();
+    }
+
+    /**
+     * Provides an opportunity to retain information about this particular processor instance
+     *
+     * @param comments new comments
+     */
+    @Override
+    public void setComments(final String comments) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            this.comments.set(comments);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public ScheduledState getScheduledState() {
+        return scheduledState.get();
+    }
+
+    @Override
+    public Position getPosition() {
+        return position.get();
+    }
+
+    @Override
+    public void setPosition(Position position) {
+        this.position.set(position);
+    }
+
+    @Override
+    public Map<String, String> getStyle() {
+        return style.get();
+    }
+
+    @Override
+    public void setStyle(final Map<String, String> style) {
+        if (style != null) {
+            this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
+        }
+    }
+
+    @Override
+    public String getIdentifier() {
+        return identifier.get();
+    }
+
+    /**
+     * @return true if flow file content generated by this processor is considered loss tolerant
+     */
+    @Override
+    public boolean isLossTolerant() {
+        return lossTolerant.get();
+    }
+
+    @Override
+    public boolean isIsolated() {
+        return isolated.get();
+    }
+
+    /**
+     * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
+     */
+    @Override
+    public boolean isTriggerWhenEmpty() {
+        return triggerWhenEmpty;
+    }
+
+    /**
+     * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
+     */
+    @Override
+    public boolean isSideEffectFree() {
+        return sideEffectFree;
+    }
+
+    @Override
+    public boolean isHighThroughputSupported() {
+        return batchSupported;
+    }
+
+    /**
+     * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
+     */
+    @Override
+    public boolean isTriggerWhenAnyDestinationAvailable() {
+        return triggerWhenAnyDestinationAvailable;
+    }
+
+    /**
+     * Sets whether flow file content generated by this processor is loss tolerant (i.e., whether it must be persisted)
+     *
+     * @param lossTolerant true if content generated by this processor may be lost without compromising the flow
+     */
+    @Override
+    public void setLossTolerant(final boolean lossTolerant) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            this.lossTolerant.set(lossTolerant);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * Sets whether the processor runs only on the primary node.
+     *
+     * @param isolated true if the processor should run only on the primary node
+     */
+    public void setIsolated(final boolean isolated) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            this.isolated.set(isolated);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isAutoTerminated(final Relationship relationship) {
+        final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
+        if (terminatable == null) {
+            return false;
+        }
+        return terminatable.contains(relationship);
+    }
+
+    @Override
+    public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+
+            for (final Relationship rel : terminate) {
+                if (!getConnections(rel).isEmpty()) {
+                    throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
+                }
+            }
+            undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * @return an unmodifiable Set that contains all of the Relationship objects that are configured to be auto-terminated
+     */
+    @Override
+    public Set<Relationship> getAutoTerminatedRelationships() {
+        Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
+        if (relationships == null) {
+            relationships = new HashSet<>();
+        }
+        return Collections.unmodifiableSet(relationships);
+    }
+
+    @Override
+    public String getName() {
+        return name.get();
+    }
+
+    /**
+     * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
+     */
+    @SuppressWarnings("deprecation")
+    public String getProcessorDescription() {
+        CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
+        String description = null;
+        if (capDesc != null) {
+            description = capDesc.value();
+        } else {
+            final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc = processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
+            if (deprecatedCapDesc != null) {
+                description = deprecatedCapDesc.value();
+            }
+        }
+
+        return description;
+    }
+
+    @Override
+    public void setName(final String name) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            this.name.set(name);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * @param timeUnit determines the unit of time to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
+     * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
+     */
+    @Override
+    public long getSchedulingPeriod(final TimeUnit timeUnit) {
+        return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
+    }
+
+    @Override
+    public boolean isEventDrivenSupported() {
+        readLock.lock();
+        try {
+            return this.eventDrivenSupported;
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    /**
+     * Updates the Scheduling Strategy used for this Processor
+     *
+     * @param schedulingStrategy strategy
+     *
+     * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
+     */
+    @Override
+    public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
+        writeLock.lock();
+        try {
+            if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
+                // not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
+                // it no longer supports EventDriven mode, we don't want the app to fail to startup if it was already in Event-Driven
+                // Mode. Instead, we will simply leave it in Timer-Driven mode
+                return;
+            }
+
+            this.schedulingStrategy = schedulingStrategy;
+            setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
+        } finally {
+            writeLock.unlock();
+        }
+    }
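
A small sketch of the fallback above, with stand-in types: requesting EVENT_DRIVEN on a node that does not support it leaves the previous strategy in place instead of throwing, so a flow persisted in event-driven mode still starts after the processor loses that capability.

    class SchedulingStrategySketch {
        enum Strategy { TIMER_DRIVEN, EVENT_DRIVEN, CRON_DRIVEN, PRIMARY_NODE_ONLY }

        private final boolean eventDrivenSupported;
        private Strategy strategy = Strategy.TIMER_DRIVEN;

        SchedulingStrategySketch(final boolean eventDrivenSupported) {
            this.eventDrivenSupported = eventDrivenSupported;
        }

        void setStrategy(final Strategy requested) {
            if (requested == Strategy.EVENT_DRIVEN && !eventDrivenSupported) {
                return; // silently keep the previous strategy rather than fail startup
            }
            this.strategy = requested;
        }

        Strategy getStrategy() {
            return strategy;
        }
    }
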
+
+    /**
+     * @return the currently configured scheduling strategy
+     */
+    @Override
+    public SchedulingStrategy getSchedulingStrategy() {
+        readLock.lock();
+        try {
+            return this.schedulingStrategy;
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public String getSchedulingPeriod() {
+        return schedulingPeriod.get();
+    }
+
+    @Override
+    public void setScheduldingPeriod(final String schedulingPeriod) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+
+            switch (schedulingStrategy) {
+                case CRON_DRIVEN: {
+                    try {
+                        new CronExpression(schedulingPeriod);
+                    } catch (final Exception e) {
+                        throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
+                    }
+                    break;
+                }
+                case PRIMARY_NODE_ONLY:
+                case TIMER_DRIVEN: {
+                    final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
+                    if (schedulingNanos < 0) {
+                        throw new IllegalArgumentException("Scheduling Period must be positive");
+                    }
+                    this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
+                    break;
+                }
+                case EVENT_DRIVEN:
+                default:
+                    return;
+            }
+
+            this.schedulingPeriod.set(schedulingPeriod);
+        } finally {
+            writeLock.unlock();
+        }
+    }
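
The CRON_DRIVEN branch validates the period by constructing a CronExpression and converting any parse failure into an IllegalArgumentException. A sketch of that branch in isolation, assuming the CronExpression above is org.quartz.CronExpression (an assumption; the diff does not show the import):

    import java.text.ParseException;
    import org.quartz.CronExpression;

    final class CronValidationSketch {
        static void requireValidCron(final String schedulingPeriod) {
            try {
                // the Quartz constructor throws ParseException for malformed expressions
                new CronExpression(schedulingPeriod);
            } catch (final ParseException e) {
                throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod, e);
            }
        }
    }
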
+
+    @Override
+    public long getRunDuration(final TimeUnit timeUnit) {
+        readLock.lock();
+        try {
+            return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void setRunDuration(final long duration, final TimeUnit timeUnit) {
+        writeLock.lock();
+        try {
+            if (duration < 0) {
+                throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
+            }
+
+            this.runNanos = timeUnit.toNanos(duration);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public long getYieldPeriod(final TimeUnit timeUnit) {
+        return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+    }
+
+    @Override
+    public String getYieldPeriod() {
+        return yieldPeriod.get();
+    }
+
+    @Override
+    public void setYieldPeriod(final String yieldPeriod) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
+            if (yieldMillis < 0) {
+                throw new IllegalArgumentException("Yield duration must be positive");
+            }
+            this.yieldPeriod.set(yieldPeriod);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * Causes the processor not to be scheduled for some period of time. This duration can be obtained via {@link #getYieldPeriod(TimeUnit)} and set via
+     * {@link #setYieldPeriod(String)}.
+     */
+    @Override
+    public void yield() {
+        final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
+        yield(yieldMillis, TimeUnit.MILLISECONDS);
+
+        final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
+        LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
+    }
+
+    @Override
+    public void yield(final long period, final TimeUnit timeUnit) {
+        final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
+        yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
+
+        processScheduler.yield(this);
+    }
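
The yield update above reads and then sets yieldExpiration in two separate steps. The sketch below is a compare-and-set variant (a suggestion, not the committed code) that preserves the maximum expiration even when two threads yield at once:

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicLong;

    class YieldExpirationSketch {
        private final AtomicLong yieldExpiration = new AtomicLong(0L);

        void yield(final long period, final TimeUnit timeUnit) {
            final long candidate = System.currentTimeMillis() + TimeUnit.MILLISECONDS.convert(period, timeUnit);
            long current;
            do {
                current = yieldExpiration.get();
                if (candidate <= current) {
                    return; // an equal or later expiration is already recorded
                }
            } while (!yieldExpiration.compareAndSet(current, candidate));
        }

        boolean isYielded() {
            return System.currentTimeMillis() < yieldExpiration.get();
        }
    }
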
+
+    /**
+     * @return the time, in milliseconds since the Epoch, at which this processor will once again be eligible to be scheduled.
+     */
+    @Override
+    public long getYieldExpiration() {
+        return yieldExpiration.get();
+    }
+
+    @Override
+    public long getPenalizationPeriod(final TimeUnit timeUnit) {
+        return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+    }
+
+    @Override
+    public String getPenalizationPeriod() {
+        return penalizationPeriod.get();
+    }
+
+    @Override
+    public void setPenalizationPeriod(final String penalizationPeriod) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
+            if (penalizationMillis < 0) {
+                throw new IllegalArgumentException("Penalization duration must be positive");
+            }
+            this.penalizationPeriod.set(penalizationPeriod);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * Sets the maximum number of tasks that may run concurrently for this processor.
+     *
+     * @param taskCount the maximum number of concurrent tasks this processor may have running
+     * @throws IllegalArgumentException if the given value is less than 1
+     */
+    @Override
+    public void setMaxConcurrentTasks(final int taskCount) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
+                throw new IllegalArgumentException("Max Concurrent Tasks must be at least 1; cannot set to " + taskCount);
+            }
+            if (!triggeredSerially) {
+                concurrentTaskCount.set(taskCount);
+            }
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isTriggeredSerially() {
+        return triggeredSerially;
+    }
+
+    /**
+     * @return the number of tasks that may execute concurrently for this processor
+     */
+    @Override
+    public int getMaxConcurrentTasks() {
+        return concurrentTaskCount.get();
+    }
+
+    @Override
+    public LogLevel getBulletinLevel() {
+        return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
+    }
+
+    @Override
+    public void setBulletinLevel(final LogLevel level) {
+        LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
+    }
+
+    @Override
+    public Set<Connection> getConnections() {
+        final Set<Connection> allConnections = new HashSet<>();
+        readLock.lock();
+        try {
+            for (final Set<Connection> connectionSet : connections.values()) {
+                allConnections.addAll(connectionSet);
+            }
+        } finally {
+            readLock.unlock();
+        }
+
+        return allConnections;
+    }
+
+    @Override
+    public List<Connection> getIncomingConnections() {
+        return incomingConnectionsRef.get();
+    }
+
+    @Override
+    public Set<Connection> getConnections(final Relationship relationship) {
+        final Set<Connection> applicableConnections;
+        readLock.lock();
+        try {
+            applicableConnections = connections.get(relationship);
+        } finally {
+            readLock.unlock();
+        }
+        return (applicableConnections == null) ? Collections.<Connection> emptySet() : Collections.unmodifiableSet(applicableConnections);
+    }
+
+    @Override
+    public void addConnection(final Connection connection) {
+        Objects.requireNonNull(connection, "connection cannot be null");
+
+        if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
+            throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
+        }
+
+        writeLock.lock();
+        try {
+            List<Connection> updatedIncoming = null;
+            if (connection.getDestination().equals(this)) {
+                // don't add the connection twice. This may occur if we have a self-loop because we will be told
+                // to add the connection once because we are the source and again because we are the destination.
+                final List<Connection> incomingConnections = incomingConnectionsRef.get();
+                updatedIncoming = new ArrayList<>(incomingConnections);
+                if (!updatedIncoming.contains(connection)) {
+                    updatedIncoming.add(connection);
+                }
+            }
+
+            if (connection.getSource().equals(this)) {
+                // don't add the connection twice. This may occur if we have a self-loop because we will be told
+                // to add the connection once because we are the source and again because we are the destination.
+                if (!destinations.containsKey(connection)) {
+                    for (final Relationship relationship : connection.getRelationships()) {
+                        final Relationship rel = getRelationship(relationship.getName());
+                        Set<Connection> set = connections.get(rel);
+                        if (set == null) {
+                            set = new HashSet<>();
+                            connections.put(rel, set);
+                        }
+
+                        set.add(connection);
+
+                        destinations.put(connection, connection.getDestination());
+                    }
+
+                    final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+                    if (autoTerminated != null) {
+                        autoTerminated.removeAll(connection.getRelationships());
+                        this.undefinedRelationshipsToTerminate.set(autoTerminated);
+                    }
+                }
+            }
+
+            if (updatedIncoming != null) {
+                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+            }
+        } finally {
+            writeLock.unlock();
+        }
+    }
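
addConnection maintains the incoming-connection list with a copy-on-write AtomicReference: readers always see an immutable snapshot, and the contains() guard avoids the double-add a self-loop would otherwise cause (the node is notified once as source and once as destination). A generic sketch of that pattern, with a type parameter standing in for Connection:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicReference;

    class CopyOnWriteIncomingSketch<C> {
        private final AtomicReference<List<C>> incomingRef =
            new AtomicReference<List<C>>(Collections.<C>unmodifiableList(new ArrayList<C>()));

        void add(final C connection) {
            final List<C> updated = new ArrayList<>(incomingRef.get());
            if (!updated.contains(connection)) { // guard against the self-loop double-add
                updated.add(connection);
            }
            incomingRef.set(Collections.unmodifiableList(updated));
        }

        List<C> snapshot() {
            return incomingRef.get(); // safe to iterate without locking
        }
    }
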
+
+    @Override
+    public boolean hasIncomingConnection() {
+        return !incomingConnectionsRef.get().isEmpty();
+    }
+
+    @Override
+    public void updateConnection(final Connection connection) throws IllegalStateException {
+        if (requireNonNull(connection).getSource().equals(this)) {
+            writeLock.lock();
+            try {
+                //
+                // update any relationships
+                //
+                // first check if any relationships were removed.
+                final List<Relationship> existingRelationships = new ArrayList<>();
+                for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
+                    if (entry.getValue().contains(connection)) {
+                        existingRelationships.add(entry.getKey());
+                    }
+                }
+
+                for (final Relationship rel : connection.getRelationships()) {
+                    if (!existingRelationships.contains(rel)) {
+                        // relationship was removed. Check if this is legal.
+                        final Set<Connection> connectionsForRelationship = getConnections(rel);
+                        if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
+                            // if we are running and we do not terminate undefined relationships and this is the only
+                            // connection that defines the given relationship, and that relationship is required,
+                            // then it is not legal to remove this relationship from this connection.
+                            throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
+                                + this + ", which is currently running");
+                        }
+                    }
+                }
+
+                // remove the connection from any list that currently contains it
+                for (final Set<Connection> list : connections.values()) {
+                    list.remove(connection);
+                }
+
+                // add the connection in for all relationships listed.
+                for (final Relationship rel : connection.getRelationships()) {
+                    Set<Connection> set = connections.get(rel);
+                    if (set == null) {
+                        set = new HashSet<>();
+                        connections.put(rel, set);
+                    }
+                    set.add(connection);
+                }
+
+                // update to the new destination
+                destinations.put(connection, connection.getDestination());
+
+                final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+                if (autoTerminated != null) {
+                    autoTerminated.removeAll(connection.getRelationships());
+                    this.undefinedRelationshipsToTerminate.set(autoTerminated);
+                }
+            } finally {
+                writeLock.unlock();
+            }
+        }
+
+        if (connection.getDestination().equals(this)) {
+            writeLock.lock();
+            try {
+                // update our incoming connections -- we can just remove & re-add the connection to
+                // update the list.
+                final List<Connection> incomingConnections = incomingConnectionsRef.get();
+                final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+                updatedIncoming.remove(connection);
+                updatedIncoming.add(connection);
+                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+            } finally {
+                writeLock.unlock();
+            }
+        }
+    }
+
+    @Override
+    public void removeConnection(final Connection connection) {
+        boolean connectionRemoved = false;
+
+        if (requireNonNull(connection).getSource().equals(this)) {
+            for (final Relationship relationship : connection.getRelationships()) {
+                final Set<Connection> connectionsForRelationship = getConnections(relationship);
+                if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
+                    throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
+                }
+            }
+
+            writeLock.lock();
+            try {
+                for (final Set<Connection> connectionList : this.connections.values()) {
+                    connectionList.remove(connection);
+                }
+
+                connectionRemoved = (destinations.remove(connection) != null);
+            } finally {
+                writeLock.unlock();
+            }
+        }
+
+        if (connection.getDestination().equals(this)) {
+            writeLock.lock();
+            try {
+                final List<Connection> incomingConnections = incomingConnectionsRef.get();
+                if (incomingConnections.contains(connection)) {
+                    final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+                    updatedIncoming.remove(connection);
+                    incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+                    return;
+                }
+            } finally {
+                writeLock.unlock();
+            }
+        }
+
+        if (!connectionRemoved) {
+            throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
+        }
+    }
+
+    /**
+     * @param relationshipName name of the relationship to look up
+     * @return the relationship of this node's processor with the given name, or a new relationship of that name if the processor does not define one
+     */
+    @Override
+    public Relationship getRelationship(final String relationshipName) {
+        final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
+        Relationship returnRel = specRel;
+
+        final Set<Relationship> relationships;
+        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+            relationships = processor.getRelationships();
+        }
+
+        for (final Relationship rel : relationships) {
+            if (rel.equals(specRel)) {
+                returnRel = rel;
+                break;
+            }
+        }
+        return returnRel;
+    }
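
The lookup above works because Relationship equality is name-based: a bare "spec" relationship built from the name alone matches the processor's canonical instance, which is then returned with its full definition. A sketch of that lookup as a standalone helper (findByName is hypothetical, not part of this commit):

    import java.util.Set;
    import org.apache.nifi.processor.Relationship;

    final class RelationshipLookupSketch {
        static Relationship findByName(final Set<Relationship> defined, final String name) {
            final Relationship spec = new Relationship.Builder().name(name).build();
            for (final Relationship rel : defined) {
                if (rel.equals(spec)) {
                    return rel; // canonical instance declared by the processor
                }
            }
            return spec; // no match: fall back to the name-only relationship
        }
    }
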
+
+    @Override
+    public Processor getProcessor() {
+        return this.processor;
+    }
+
+    /**
+     * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
+     */
+    public Set<Connectable> getDestinations() {
+        final Set<Connectable> nonSelfDestinations = new HashSet<>();
+        readLock.lock();
+        try {
+            for (final Connectable connectable : destinations.values()) {
+                if (connectable != this) {
+                    nonSelfDestinations.add(connectable);
+                }
+            }
+        } finally {
+            readLock.unlock();
+        }
+        return nonSelfDestinations;
+    }
+
+    public Set<Connectable> getDestinations(final Relationship relationship) {
+        readLock.lock();
+        try {
+            final Set<Connectable> destinationSet = new HashSet<>();
+            final Set<Connection> relationshipConnections = connections.get(relationship);
+            if (relationshipConnections != null) {
+                for (final Connection connection : relationshipConnections) {
+                    destinationSet.add(destinations.get(connection));
+                }
+            }
+            return destinationSet;
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    public Set<Relationship> getUndefinedRelationships() {
+        final Set<Relationship> undefined = new HashSet<>();
+        readLock.lock();
+        try {
+            final Set<Relationship> relationships;
+            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+                relationships = processor.getRelationships();
+            }
+
+            if (relationships == null) {
+                return undefined;
+            }
+            for (final Relationship relation : relationships) {
+                final Set<Connection> connectionSet = this.connections.get(relation);
+                if (connectionSet == null || connectionSet.isEmpty()) {
+                    undefined.add(relation);
+                }
+            }
+        } finally {
+            readLock.unlock();
+        }
+        return undefined;
+    }
+
+    /**
+     * Determines if the given node is a destination for this node
+     *
+     * @param node the node to check
+     * @return true if it is a direct destination node; false otherwise
+     */
+    boolean isRelated(final ProcessorNode node) {
+        readLock.lock();
+        try {
+            return this.destinations.containsValue(node);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isRunning() {
+        readLock.lock();
+        try {
+            return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public int getActiveThreadCount() {
+        readLock.lock();
+        try {
+            return processScheduler.getActiveThreadCount(this);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isValid() {
+        readLock.lock();
+        try {
+            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+            final Collection<ValidationResult> validationResults;
+            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+                validationResults = getProcessor().validate(validationContext);
+            }
+
+            for (final ValidationResult result : validationResults) {
+                if (!result.isValid()) {
+                    return false;
+                }
+            }
+
+            for (final Relationship undef : getUndefinedRelationships()) {
+                if (!isAutoTerminated(undef)) {
+                    return false;
+                }
+            }
+
+            switch (getInputRequirement()) {
+                case INPUT_ALLOWED:
+                    break;
+                case INPUT_FORBIDDEN: {
+                    if (!getIncomingConnections().isEmpty()) {
+                        return false;
+                    }
+                    break;
+                }
+                case INPUT_REQUIRED: {
+                    if (getIncomingConnections().isEmpty()) {
+                        return false;
+                    }
+                    break;
+                }
+            }
+        } catch (final Throwable t) {
+            return false;
+        } finally {
+            readLock.unlock();
+        }
+
+        return true;
+    }
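
The input-requirement portion of isValid reduces to a predicate over the incoming-connection count. A sketch of that rule in isolation (satisfiesInputRequirement is a hypothetical helper; the enum mirrors InputRequirement.Requirement):

    final class InputRequirementRuleSketch {
        enum Requirement { INPUT_ALLOWED, INPUT_REQUIRED, INPUT_FORBIDDEN }

        static boolean satisfiesInputRequirement(final Requirement requirement, final int incomingConnections) {
            switch (requirement) {
                case INPUT_FORBIDDEN:
                    return incomingConnections == 0; // no upstream connections allowed
                case INPUT_REQUIRED:
                    return incomingConnections > 0;  // at least one upstream connection
                case INPUT_ALLOWED:
                default:
                    return true;                     // connection count is irrelevant
            }
        }
    }
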
+
+    @Override
+    public Collection<ValidationResult> getValidationErrors() {
+        final List<ValidationResult> results = new ArrayList<>();
+        readLock.lock();
+        try {
+            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+            final Collection<ValidationResult> validationResults;
+            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+                validationResults = getProcessor().validate(validationContext);
+            }
+
+            for (final ValidationResult result : validationResults) {
+                if (!result.isValid()) {
+                    results.add(result);
+                }
+            }
+
+            for (final Relationship relationship : getUndefinedRelationships()) {
+                if (!isAutoTerminated(relationship)) {
+                    final ValidationResult error = new ValidationResult.Builder()
+                        .explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
+                        .subject("Relationship " + relationship.getName())
+                        .valid(false)
+                        .build();
+                    results.add(error);
+                }
+            }
+
+            switch (getInputRequirement()) {
+                case INPUT_ALLOWED:
+                    break;
+                case INPUT_FORBIDDEN: {
+                    final int incomingConnCount = getIncomingConnections().size();
+                    if (incomingConnCount != 0) {
+                        results.add(new ValidationResult.Builder()
+                            .explanation("Processor is currently configured with " + incomingConnCount + " upstream connections but does not accept any upstream connections")
+                            .subject("Upstream Connections")
+                            .valid(false)
+                            .build());
+                    }
+                    break;
+                }
+                case INPUT_REQUIRED: {
+                    if (getIncomingConnections().isEmpty()) {
+                        results.add(new ValidationResult.Builder()
+                            .explanation("Processor requires an upstream connection but currently has none")
+                            .subject("Upstream Connections")
+                            .valid(false)
+                            .build());
+                    }
+                    break;
+                }
+            }
+        } catch (final Throwable t) {
+            results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
+        } finally {
+            readLock.unlock();
+        }
+        return results;
+    }
+
+    @Override
+    public Requirement getInputRequirement() {
+        return inputRequirement;
+    }
+
+    /**
+     * Establishes node equality (based on the processor's identifier)
+     *
+     * @param other node
+     * @return true if equal
+     */
+    @Override
+    public boolean equals(final Object other) {
+        if (!(other instanceof ProcessorNode)) {
+            return false;
+        }
+        final ProcessorNode on = (ProcessorNode) other;
+        return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
+    }
+
+    @Override
+    public int hashCode() {
+        return new HashCodeBuilder(7, 67).append(identifier.get()).toHashCode();
+    }
+
+    @Override
+    public Collection<Relationship> getRelationships() {
+        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+            return getProcessor().getRelationships();
+        }
+    }
+
+    @Override
+    public String toString() {
+        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+            return getProcessor().toString();
+        }
+    }
+
+    @Override
+    public ProcessGroup getProcessGroup() {
+        return processGroup.get();
+    }
+
+    @Override
+    public void setProcessGroup(final ProcessGroup group) {
+        writeLock.lock();
+        try {
+            this.processGroup.set(group);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
+        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+            processor.onTrigger(context, sessionFactory);
+        }
+    }
+
+    @Override
+    public ConnectableType getConnectableType() {
+        return ConnectableType.PROCESSOR;
+    }
+
+    @Override
+    public void setScheduledState(final ScheduledState scheduledState) {
+        this.scheduledState.set(scheduledState);
+        if (!scheduledState.equals(ScheduledState.RUNNING)) { // if user stops processor, clear yield expiration
+            yieldExpiration.set(0L);
+        }
+    }
+
+    @Override
+    public void setAnnotationData(final String data) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot set AnnotationData while processor is running");
+            }
+
+            this.annotationData.set(data);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public String getAnnotationData() {
+        return annotationData.get();
+    }
+
+    @Override
+    public Collection<ValidationResult> validate(final ValidationContext validationContext) {
+        return getValidationErrors();
+    }
+
+    @Override
+    public void verifyCanDelete() throws IllegalStateException {
+        verifyCanDelete(false);
+    }
+
+    @Override
+    public void verifyCanDelete(final boolean ignoreConnections) {
+        readLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException(this + " is running");
+            }
+
+            if (!ignoreConnections) {
+                for (final Set<Connection> connectionSet : connections.values()) {
+                    for (final Connection connection : connectionSet) {
+                        connection.verifyCanDelete();
+                    }
+                }
+
+                for (final Connection connection : incomingConnectionsRef.get()) {
+                    if (connection.getSource().equals(this)) {
+                        connection.verifyCanDelete();
+                    } else {
+                        throw new IllegalStateException(this + " is the destination of another component");
+                    }
+                }
+            }
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void verifyCanStart() {
+        readLock.lock();
+        try {
+            switch (getScheduledState()) {
+                case DISABLED:
+                    throw new IllegalStateException(this + " cannot be started because it is disabled");
+                case RUNNING:
+                    throw new IllegalStateException(this + " cannot be started because it is already running");
+                case STOPPED:
+                    break;
+            }
+            verifyNoActiveThreads();
+
+            if (!isValid()) {
+                throw new IllegalStateException(this + " is not in a valid state");
+            }
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
+        switch (getScheduledState()) {
+            case DISABLED:
+                throw new IllegalStateException(this + " cannot be started because it is disabled");
+            case RUNNING:
+                throw new IllegalStateException(this + " cannot be started because it is already running");
+            case STOPPED:
+                break;
+        }
+        verifyNoActiveThreads();
+
+        final Set<String> ids = new HashSet<>();
+        for (final ControllerServiceNode node : ignoredReferences) {
+            ids.add(node.getIdentifier());
+        }
+
+        final Collection<ValidationResult> validationResults = getValidationErrors(ids);
+        for (final ValidationResult result : validationResults) {
+            if (!result.isValid()) {
+                throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
+            }
+        }
+    }
+
+    @Override
+    public void verifyCanStop() {
+        if (getScheduledState() != ScheduledState.RUNNING) {
+            throw new IllegalStateException(this + " is not scheduled to run");
+        }
+    }
+
+    @Override
+    public void verifyCanUpdate() {
+        readLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException(this + " is not stopped");
+            }
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void verifyCanEnable() {
+        readLock.lock();
+        try {
+            if (getScheduledState() != ScheduledState.DISABLED) {
+                throw new IllegalStateException(this + " is not disabled");
+            }
+
+            verifyNoActiveThreads();
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void verifyCanDisable() {
+        readLock.lock();
+        try {
+            if (getScheduledState() != ScheduledState.STOPPED) {
+                throw new IllegalStateException(this + " is not stopped");
+            }
+            verifyNoActiveThreads();
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    private void verifyNoActiveThreads() throws IllegalStateException {
+        final int threadCount = processScheduler.getActiveThreadCount(this);
+        if (threadCount > 0) {
+            throw new IllegalStateException(this + " has " + threadCount + " threads still active");
+        }
+    }
+
+    @Override
+    public void verifyModifiable() throws IllegalStateException {
+        if (isRunning()) {
+            throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+        }
+    }
 }

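The try-with-resources blocks in the node implementation above exist to swap the thread context classloader before delegating to the wrapped Processor: NarCloseable.withNarLoader() installs the NAR's classloader and restores the caller's on close, so classes the processor resolves reflectively come from its own NAR rather than the framework classpath. A minimal sketch of that idiom follows; ClassLoaderSwap and its method names are illustrative, not NiFi's actual NarCloseable API.

    // Illustrative sketch of the classloader-swapping idiom; not NiFi's NarCloseable API.
    final class ClassLoaderSwap implements AutoCloseable {
        private final ClassLoader original;

        ClassLoaderSwap(final ClassLoader replacement) {
            // Remember the caller's classloader, then install the replacement.
            this.original = Thread.currentThread().getContextClassLoader();
            Thread.currentThread().setContextClassLoader(replacement);
        }

        @Override
        public void close() {
            // Restore the caller's classloader even if the delegated call throws.
            Thread.currentThread().setContextClassLoader(original);
        }

        // Usage mirroring onTrigger() above; the NAR classloader is supplied by the caller.
        static void runWithClassLoader(final ClassLoader narLoader, final Runnable delegate) {
            try (final ClassLoaderSwap swap = new ClassLoaderSwap(narLoader)) {
                delegate.run();
            }
        }
    }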

[06/19] nifi git commit: NIFI-810: - Adding basic support for preventing connection when appropriate. - Updating validation when [dis]connecting processors.

Posted by ma...@apache.org.
NIFI-810:
- Adding basic support for preventing connections when the destination does not accept input.
- Updating validation when connecting and disconnecting processors.

Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/2215bc84
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/2215bc84
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/2215bc84

Branch: refs/heads/master
Commit: 2215bc848b7db395b2ca9ac7cc4dc10891393721
Parents: 034ee6d
Author: Matt Gilman <ma...@gmail.com>
Authored: Fri Sep 25 17:46:58 2015 -0400
Committer: Matt Gilman <ma...@gmail.com>
Committed: Fri Sep 25 17:46:58 2015 -0400

----------------------------------------------------------------------
 .../org/apache/nifi/web/api/dto/ProcessorDTO.java    | 15 +++++++++++++++
 .../org/apache/nifi/controller/TemplateManager.java  |  1 +
 .../java/org/apache/nifi/web/api/dto/DtoFactory.java |  1 +
 .../src/main/webapp/js/nf/canvas/nf-actions.js       |  4 +++-
 .../src/main/webapp/js/nf/canvas/nf-canvas-utils.js  | 13 ++++++++++---
 .../js/nf/canvas/nf-connection-configuration.js      |  8 ++++++--
 6 files changed, 36 insertions(+), 6 deletions(-)
----------------------------------------------------------------------

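For context, the inputRequirement value this commit threads through the DTO, the template scrubbing, and the canvas checks below originates from the InputRequirement annotation introduced earlier in this series; its Requirement enum carries INPUT_REQUIRED, INPUT_ALLOWED, and INPUT_FORBIDDEN. Here is a minimal sketch of a processor opting out of incoming connections; GenerateExampleData is a hypothetical class, not one of the standard processors.

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.exception.ProcessException;

    // Hypothetical source processor: INPUT_FORBIDDEN tells the framework and the UI
    // that no connection may use this processor as its destination.
    @InputRequirement(Requirement.INPUT_FORBIDDEN)
    public class GenerateExampleData extends AbstractProcessor {

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            // A real source processor would create and transfer FlowFiles here.
        }
    }

DtoFactory (below) surfaces Requirement.name(), here the string "INPUT_FORBIDDEN", on the ProcessorDTO; that is the value the canvas utilities compare against.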

http://git-wip-us.apache.org/repos/asf/nifi/blob/2215bc84/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
index c65c46a..866d77c 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
@@ -36,6 +36,7 @@ public class ProcessorDTO extends NiFiComponentDTO {
     private String description;
     private Boolean supportsParallelProcessing;
     private Boolean supportsEventDriven;
+    private String inputRequirement;
 
     private ProcessorConfigDTO config;
 
@@ -121,6 +122,20 @@ public class ProcessorDTO extends NiFiComponentDTO {
     }
 
     /**
+     * @return the input requirement of this processor
+     */
+    @ApiModelProperty(
+            value = "The input requirement for this processor."
+    )
+    public String getInputRequirement() {
+        return inputRequirement;
+    }
+
+    public void setInputRequirement(String inputRequirement) {
+        this.inputRequirement = inputRequirement;
+    }
+
+    /**
      * @return whether this processor supports event driven scheduling
      */
     @ApiModelProperty(

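Since the DTO stores the requirement as the enum's name() rather than the enum itself, Java-side consumers can map the string back with valueOf(). A short sketch under that assumption; acceptsInput is an illustrative helper, not part of the DTO.

    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.web.api.dto.ProcessorDTO;

    final class InputRequirementSupport {

        // Illustrative helper: true when the processor may be used as a connection destination.
        static boolean acceptsInput(final ProcessorDTO dto) {
            return Requirement.valueOf(dto.getInputRequirement()) != Requirement.INPUT_FORBIDDEN;
        }
    }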
http://git-wip-us.apache.org/repos/asf/nifi/blob/2215bc84/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
index 7b8e173..a332e05 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
@@ -320,6 +320,7 @@ public class TemplateManager {
 
             // remove validation errors
             processorDTO.setValidationErrors(null);
+            processorDTO.setInputRequirement(null);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/2215bc84/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
index 76bce6f..16b114e 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
@@ -1402,6 +1402,7 @@ public final class DtoFactory {
         dto.setPosition(createPositionDto(node.getPosition()));
         dto.setStyle(node.getStyle());
         dto.setParentGroupId(node.getProcessGroup().getIdentifier());
+        dto.setInputRequirement(node.getInputRequirement().name());
 
         dto.setType(node.getProcessor().getClass().getCanonicalName());
         dto.setName(node.getName());

http://git-wip-us.apache.org/repos/asf/nifi/blob/2215bc84/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
index 3b47a8d..c6ef75f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
@@ -737,7 +737,9 @@ nf.Actions = (function () {
                             var destinationData = destination.datum();
 
                             // update the destination component accordingly
-                            if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                            if (nf.CanvasUtils.isProcessor(destination)) {
+                                nf.Processor.reload(destinationData.component);
+                            } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                                 nf.RemoteProcessGroup.reload(destinationData.component);
                             }
                         } else {

http://git-wip-us.apache.org/repos/asf/nifi/blob/2215bc84/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
index 9f56e30..1be551f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
@@ -1371,9 +1371,16 @@ nf.CanvasUtils = (function () {
                 return false;
             }
 
-            return nf.CanvasUtils.isProcessor(selection) || nf.CanvasUtils.isProcessGroup(selection) ||
-                    nf.CanvasUtils.isRemoteProcessGroup(selection) || nf.CanvasUtils.isOutputPort(selection) ||
-                    nf.CanvasUtils.isFunnel(selection);
+            if (nf.CanvasUtils.isProcessGroup(selection) || nf.CanvasUtils.isRemoteProcessGroup(selection) ||
+                    nf.CanvasUtils.isOutputPort(selection) || nf.CanvasUtils.isFunnel(selection)) {
+                return true;
+            }
+
+            // if processor, ensure it supports input
+            if (nf.CanvasUtils.isProcessor(selection)) {
+                var destinationData = selection.datum();
+                return destinationData.component.inputRequirement !== 'INPUT_FORBIDDEN';
+            }
         }
     };
 }());
\ No newline at end of file

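The check above is the client-side half: the canvas refuses to offer a processor whose component.inputRequirement is 'INPUT_FORBIDDEN' as a drop target, and simply falls through (returning a falsy undefined) for component types that are never valid destinations. The server-side validation this commit series updates must make the same decision; the following standalone sketch captures that rule under the simplifying assumption that only the incoming-connection count matters, and validateInput is an illustrative helper rather than NiFi's actual API.

    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.components.ValidationResult;

    final class InputRequirementValidation {

        // Illustrative rule: a processor is invalid when its incoming connections
        // disagree with its declared input requirement.
        static ValidationResult validateInput(final Requirement requirement, final int incomingConnections) {
            final boolean valid;
            switch (requirement) {
                case INPUT_REQUIRED:
                    valid = incomingConnections > 0;
                    break;
                case INPUT_FORBIDDEN:
                    valid = incomingConnections == 0;
                    break;
                default: // INPUT_ALLOWED accepts either arrangement
                    valid = true;
                    break;
            }
            return new ValidationResult.Builder()
                    .subject("Upstream Connections")
                    .valid(valid)
                    .explanation(valid ? null : "incoming connections do not agree with the input requirement")
                    .build();
        }
    }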
http://git-wip-us.apache.org/repos/asf/nifi/blob/2215bc84/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
index cc246cf..1bafa7d 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
@@ -870,7 +870,9 @@ nf.ConnectionConfiguration = (function () {
                 }
 
                 // update the destination component accordingly
-                if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                if (nf.CanvasUtils.isProcessor(destination)) {
+                    nf.Processor.reload(destinationData.component);
+                } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                     nf.RemoteProcessGroup.reload(destinationData.component);
                 }
 
@@ -958,7 +960,9 @@ nf.ConnectionConfiguration = (function () {
                     }
 
                     // update the destination component accordingly
-                    if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                    if (nf.CanvasUtils.isProcessor(destination)) {
+                        nf.Processor.reload(destinationData.component);
+                    } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                         nf.RemoteProcessGroup.reload(destinationData.component);
                     }
                 }