Posted to commits@nifi.apache.org by ma...@apache.org on 2015/10/07 23:53:21 UTC

[01/17] nifi git commit: nifi-992 Adding nifi-couchbase-bundle.

Repository: nifi
Updated Branches:
  refs/heads/NIFI-810-InputRequirement 2215bc848 -> 8e2308b78


nifi-992 Adding nifi-couchbase-bundle.

- new CouchbaseClusterControllerService
- new Processors
  - GetCouchbaseKey
  - PutCouchbaseKey

Signed-off-by: Bryan Bende <bb...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/2466a245
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/2466a245
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/2466a245

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 2466a24530f493238024eb22dc041eebe96621f3
Parents: 96764ed
Author: ijokarumawak <ij...@gmail.com>
Authored: Sat Sep 26 02:46:37 2015 +0900
Committer: Bryan Bende <bb...@apache.org>
Committed: Mon Sep 28 11:21:42 2015 -0400

----------------------------------------------------------------------
 nifi-assembly/NOTICE                            |  10 +
 nifi-assembly/pom.xml                           |   5 +
 .../nifi-couchbase-nar/pom.xml                  |  37 +++
 .../nifi-couchbase-processors/pom.xml           | 208 +++++++++++++++
 .../nifi/couchbase/CouchbaseAttributes.java     |  59 +++++
 .../CouchbaseClusterControllerService.java      |  38 +++
 .../nifi/couchbase/CouchbaseClusterService.java | 130 ++++++++++
 .../couchbase/AbstractCouchbaseProcessor.java   | 174 +++++++++++++
 .../nifi/processors/couchbase/DocumentType.java |  36 +++
 .../processors/couchbase/GetCouchbaseKey.java   | 172 +++++++++++++
 .../processors/couchbase/PutCouchbaseKey.java   | 164 ++++++++++++
 ...org.apache.nifi.controller.ControllerService |  15 ++
 .../org.apache.nifi.processor.Processor         |  16 ++
 .../couchbase/TestCouchbaseClusterService.java  |  59 +++++
 .../couchbase/TestGetCouchbaseKey.java          | 224 ++++++++++++++++
 .../couchbase/TestPutCouchbaseKey.java          | 254 +++++++++++++++++++
 nifi-nar-bundles/nifi-couchbase-bundle/pom.xml  |  35 +++
 nifi-nar-bundles/pom.xml                        |   1 +
 pom.xml                                         |   6 +
 19 files changed, 1643 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-assembly/NOTICE
----------------------------------------------------------------------
diff --git a/nifi-assembly/NOTICE b/nifi-assembly/NOTICE
index 3362740..1f7e3f1 100644
--- a/nifi-assembly/NOTICE
+++ b/nifi-assembly/NOTICE
@@ -709,6 +709,16 @@ The following binary components are provided under the Apache Software License v
         Metadata-Extractor
         Copyright 2002-2015 Drew Noakes
 
+    (ASLv2) Couchbase Java SDK
+      The following NOTICE information applies:
+        Couchbase Java SDK
+        Copyright 2014 Couchbase, Inc.
+
+    (ASLv2) RxJava
+      The following NOTICE information applies:
+        RxJava
+        Copyright 2012 Netflix, Inc.
+
 ************************
 Common Development and Distribution License 1.1
 ************************

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-assembly/pom.xml b/nifi-assembly/pom.xml
index f162588..de4fdcb 100644
--- a/nifi-assembly/pom.xml
+++ b/nifi-assembly/pom.xml
@@ -227,6 +227,11 @@ language governing permissions and limitations under the License. -->
             <artifactId>nifi-image-nar</artifactId>
             <type>nar</type>
         </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-couchbase-nar</artifactId>
+            <type>nar</type>
+        </dependency>
     </dependencies>
 
     <properties>

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-nar/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-nar/pom.xml b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-nar/pom.xml
new file mode 100644
index 0000000..4f58d1f
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-nar/pom.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.nifi</groupId>
+        <artifactId>nifi-couchbase-bundle</artifactId>
+        <version>0.3.1-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>nifi-couchbase-nar</artifactId>
+    <version>0.3.1-SNAPSHOT</version>
+    <packaging>nar</packaging>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-couchbase-processors</artifactId>
+            <version>0.3.1-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
new file mode 100644
index 0000000..33b0baa
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
@@ -0,0 +1,208 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.nifi</groupId>
+        <artifactId>nifi-couchbase-bundle</artifactId>
+        <version>0.3.1-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>nifi-couchbase-processors</artifactId>
+    <packaging>jar</packaging>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-processor-utils</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-mock</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-simple</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.couchbase.client</groupId>
+            <artifactId>java-client</artifactId>
+            <version>2.2.0</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.11</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+    <build>
+        <pluginManagement>
+            <plugins>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-checkstyle-plugin</artifactId>
+                    <version>2.15</version>
+                    <dependencies>
+                        <dependency>
+                            <groupId>com.puppycrawl.tools</groupId>
+                            <artifactId>checkstyle</artifactId>
+                            <version>6.5</version>
+                        </dependency>
+                    </dependencies>
+                </plugin>
+            </plugins>
+        </pluginManagement>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-checkstyle-plugin</artifactId>
+                <configuration>
+                    <checkstyleRules>
+                        <module name="Checker">
+                            <property name="charset" value="UTF-8" />
+                            <property name="severity" value="warning" />
+                            <!-- Checks for whitespace -->
+                            <!-- See http://checkstyle.sf.net/config_whitespace.html -->
+                            <module name="FileTabCharacter">
+                                <property name="eachLine" value="true" />
+                            </module>
+                            <module name="TreeWalker">
+                                <module name="RegexpSinglelineJava">
+                                    <property name="format" value="\s+$" />
+                                    <property name="message" value="Line has trailing whitespace." />
+                                </module>
+                                <module name="RegexpSinglelineJava">
+                                    <property name="format" value="[@]see\s+[{][@]link" />
+                                    <property name="message" value="Javadoc @see does not need @link: pick one or the other." />
+                                </module>
+                                <module name="OuterTypeFilename" />
+                                <module name="LineLength">
+                                    <!-- needs extra, because Eclipse formatter ignores the ending left
+                                        brace -->
+                                    <property name="max" value="200" />
+                                    <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://" />
+                                </module>
+                                <module name="AvoidStarImport" />
+                                <module name="UnusedImports">
+                                    <property name="processJavadoc" value="true" />
+                                </module>
+                                <module name="NoLineWrap" />
+                                <module name="LeftCurly">
+                                    <property name="maxLineLength" value="160" />
+                                </module>
+                                <module name="RightCurly" />
+                                <module name="RightCurly">
+                                    <property name="option" value="alone" />
+                                    <property name="tokens" value="CLASS_DEF, METHOD_DEF, CTOR_DEF, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO, STATIC_INIT, INSTANCE_INIT" />
+                                </module>
+                                <module name="SeparatorWrap">
+                                    <property name="tokens" value="DOT" />
+                                    <property name="option" value="nl" />
+                                </module>
+                                <module name="SeparatorWrap">
+                                    <property name="tokens" value="COMMA" />
+                                    <property name="option" value="EOL" />
+                                </module>
+                                <module name="PackageName">
+                                    <property name="format" value="^[a-z]+(\.[a-z][a-zA-Z0-9]*)*$" />
+                                </module>
+                                <module name="MethodTypeParameterName">
+                                    <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)" />
+                                </module>
+                                <module name="MethodParamPad" />
+                                <module name="OperatorWrap">
+                                    <property name="option" value="NL" />
+                                    <property name="tokens" value="BAND, BOR, BSR, BXOR, DIV, EQUAL, GE, GT, LAND, LE, LITERAL_INSTANCEOF, LOR, LT, MINUS, MOD, NOT_EQUAL, QUESTION, SL, SR, STAR " />
+                                </module>
+                                <module name="AnnotationLocation">
+                                    <property name="tokens" value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF" />
+                                </module>
+                                <module name="AnnotationLocation">
+                                    <property name="tokens" value="VARIABLE_DEF" />
+                                    <property name="allowSamelineMultipleAnnotations" value="true" />
+                                </module>
+                                <module name="NonEmptyAtclauseDescription" />
+                                <module name="JavadocMethod">
+                                    <property name="allowMissingJavadoc" value="true" />
+                                    <property name="allowMissingParamTags" value="true" />
+                                    <property name="allowMissingThrowsTags" value="true" />
+                                    <property name="allowMissingReturnTag" value="true" />
+                                    <property name="allowedAnnotations" value="Override,Test,BeforeClass,AfterClass,Before,After" />
+                                    <property name="allowThrowsTagsForSubclasses" value="true" />
+                                </module>
+                                <module name="SingleLineJavadoc" />
+                            </module>
+                        </module>
+                    </checkstyleRules>
+                    <violationSeverity>warning</violationSeverity>
+                    <includeTestSourceDirectory>true</includeTestSourceDirectory>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+    <profiles>
+        <profile>
+            <!-- Checks style and licensing requirements. This is a good idea to run
+                for contributions and for the release process. While it would be nice to
+                always run these checks, the plugins can considerably slow the build and
+                have proven to create unstable builds in our multi-module project and when
+                building using multiple threads. The stability issues seen with Checkstyle
+                in multi-module builds include false positives and false negatives. -->
+            <id>contrib-check</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.rat</groupId>
+                        <artifactId>apache-rat-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <goals>
+                                    <goal>check</goal>
+                                </goals>
+                                <phase>verify</phase>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-checkstyle-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>check-style</id>
+                                <goals>
+                                    <goal>check</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java
new file mode 100644
index 0000000..a4d69fc
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.couchbase;
+
+import org.apache.nifi.flowfile.attributes.FlowFileAttributeKey;
+
+/**
+ * Couchbase related attribute keys.
+ */
+public enum CouchbaseAttributes implements FlowFileAttributeKey {
+
+    /**
+     * A reference to the related cluster.
+     */
+    Cluster("couchbase.cluster"),
+    /**
+     * A related bucket name.
+     */
+    Bucket("couchbase.bucket"),
+    /**
+     * The id of a related document.
+     */
+    DocId("couchbase.doc.id"),
+    /**
+     * The CAS value of a related document.
+     */
+    Cas("couchbase.doc.cas"),
+    /**
+     * The expiration of a related document.
+     */
+    Expiry("couchbase.doc.expiry"),
+    ;
+
+    private final String key;
+
+    private CouchbaseAttributes(final String key) {
+        this.key = key;
+    }
+
+    @Override
+    public String key() {
+        return key;
+    }
+
+}
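
For reference, a downstream processor can read these keys back from a FlowFile produced by GetCouchbaseKey instead of hard-coding the "couchbase.*" strings. A minimal sketch (the helper class and method are illustrative, not part of this commit):

    import org.apache.nifi.couchbase.CouchbaseAttributes;
    import org.apache.nifi.flowfile.FlowFile;

    public class CouchbaseAttributeReader {

        /** Summarizes where a FlowFile's content came from, using the enum keys. */
        public static String describeOrigin(final FlowFile flowFile) {
            final String bucket = flowFile.getAttribute(CouchbaseAttributes.Bucket.key());
            final String docId = flowFile.getAttribute(CouchbaseAttributes.DocId.key());
            return docId + "@" + bucket;
        }
    }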

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseClusterControllerService.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseClusterControllerService.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseClusterControllerService.java
new file mode 100644
index 0000000..fcf72d5
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseClusterControllerService.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.couchbase;
+
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.controller.ControllerService;
+
+import com.couchbase.client.java.Bucket;
+
+/**
+ * Provides a connection to a Couchbase Server cluster throughout a NiFi
+ * data flow.
+ */
+@CapabilityDescription("Provides a centralized Couchbase connection.")
+public interface CouchbaseClusterControllerService extends ControllerService {
+
+    /**
+     * Open a bucket connection.
+     * @param bucketName the bucket name to access
+     * @return a connected bucket instance
+     */
+    public Bucket openBucket(String bucketName);
+
+}
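
For reference, a caller holding a resolved service instance uses the interface as below. A minimal sketch (the wrapper class and the "default" bucket name are illustrative assumptions):

    import org.apache.nifi.couchbase.CouchbaseClusterControllerService;

    import com.couchbase.client.java.Bucket;
    import com.couchbase.client.java.document.RawJsonDocument;

    public class BucketReader {

        /** Fetches a document's raw JSON content, or null if it does not exist. */
        public static String fetchJson(final CouchbaseClusterControllerService service, final String docId) {
            final Bucket bucket = service.openBucket("default");
            final RawJsonDocument doc = bucket.get(docId, RawJsonDocument.class);
            return doc == null ? null : doc.content();
        }
    }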

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseClusterService.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseClusterService.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseClusterService.java
new file mode 100644
index 0000000..7daa97c
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseClusterService.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.couchbase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnDisabled;
+import org.apache.nifi.annotation.lifecycle.OnEnabled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.controller.AbstractControllerService;
+import org.apache.nifi.controller.ConfigurationContext;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.reporting.InitializationException;
+
+import com.couchbase.client.core.CouchbaseException;
+import com.couchbase.client.java.Bucket;
+import com.couchbase.client.java.CouchbaseCluster;
+
+/**
+ * Provides a centralized Couchbase connection and bucket password management.
+ */
+@CapabilityDescription("Provides a centralized Couchbase connection and bucket password management."
+        + " Bucket passwords can be specified via dynamic properties.")
+@Tags({ "nosql", "couchbase", "database", "connection" })
+@DynamicProperty(name = "Bucket Password for BUCKET_NAME", value = "bucket password", description = "Specify the bucket password if necessary.")
+public class CouchbaseClusterService extends AbstractControllerService implements CouchbaseClusterControllerService {
+
+    public static final PropertyDescriptor CONNECTION_STRING = new PropertyDescriptor
+            .Builder().name("Connection String")
+            .description("The hostnames or IP addresses of the bootstrapping nodes and optional parameters."
+                    + " Syntax: couchbase://node1,node2,nodeN?param1=value1&param2=value2&paramN=valueN")
+            .required(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+
+    private static final List<PropertyDescriptor> properties;
+
+    static {
+        final List<PropertyDescriptor> props = new ArrayList<>();
+        props.add(CONNECTION_STRING);
+
+        properties = Collections.unmodifiableList(props);
+    }
+
+    private static final String DYNAMIC_PROP_BUCKET_PASSWORD = "Bucket Password for ";
+    private final Map<String, String> bucketPasswords = new HashMap<>();
+
+    private volatile CouchbaseCluster cluster;
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return properties;
+    }
+
+    @Override
+    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(
+            String propertyDescriptorName) {
+        if(propertyDescriptorName.startsWith(DYNAMIC_PROP_BUCKET_PASSWORD)){
+            return new PropertyDescriptor
+                    .Builder().name(propertyDescriptorName)
+                    .description("Bucket password.")
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .dynamic(true)
+                    .sensitive(true)
+                    .build();
+        }
+        return null;
+    }
+
+
+    /**
+     * Establish a connection to a Couchbase cluster.
+     * @param context the configuration context
+     * @throws InitializationException if unable to connect to a Couchbase cluster
+     */
+    @OnEnabled
+    public void onConfigured(final ConfigurationContext context) throws InitializationException {
+
+        for(PropertyDescriptor p : context.getProperties().keySet()){
+            if(p.isDynamic() && p.getName().startsWith(DYNAMIC_PROP_BUCKET_PASSWORD)){
+                String bucketName = p.getName().substring(DYNAMIC_PROP_BUCKET_PASSWORD.length());
+                String password = context.getProperty(p).getValue();
+                bucketPasswords.put(bucketName, password);
+            }
+        }
+        try {
+            cluster = CouchbaseCluster.fromConnectionString(context.getProperty(CONNECTION_STRING).getValue());
+        } catch(CouchbaseException e) {
+            throw new InitializationException(e);
+        }
+    }
+
+    @Override
+    public Bucket openBucket(String bucketName){
+        return cluster.openBucket(bucketName, bucketPasswords.get(bucketName));
+    }
+
+    /**
+     * Disconnect from the Couchbase cluster.
+     */
+    @OnDisabled
+    public void shutdown() {
+        if(cluster != null){
+            cluster.disconnect();
+            cluster = null;
+        }
+    }
+
+}
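
For reference, the service can be registered and enabled with the nifi-mock TestRunner, including a dynamic bucket-password property. A minimal sketch (the service identifier, connection string, and password values are illustrative):

    import org.apache.nifi.couchbase.CouchbaseClusterService;
    import org.apache.nifi.processors.couchbase.GetCouchbaseKey;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;

    public class ClusterServiceSetup {

        public static CouchbaseClusterService registerService() throws Exception {
            final TestRunner runner = TestRunners.newTestRunner(GetCouchbaseKey.class);
            final CouchbaseClusterService service = new CouchbaseClusterService();
            runner.addControllerService("couchbaseClusterService", service);
            runner.setProperty(service, CouchbaseClusterService.CONNECTION_STRING, "couchbase://localhost");
            // Dynamic property supplying the password for a bucket named "default".
            runner.setProperty(service, "Bucket Password for default", "some-password");
            runner.enableControllerService(service);
            return service;
        }
    }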

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
new file mode 100644
index 0000000..d370728
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.util.StandardValidators;
+
+import com.couchbase.client.java.Bucket;
+
+/**
+ * Provides common functionalities for Couchbase processors.
+ */
+public abstract class AbstractCouchbaseProcessor extends AbstractProcessor {
+
+    public static final PropertyDescriptor DOCUMENT_TYPE = new PropertyDescriptor
+            .Builder().name("Document Type")
+            .description("The type of the document content.")
+            .required(true)
+            .allowableValues(DocumentType.values())
+            .defaultValue(DocumentType.Json.toString())
+            .build();
+
+    public static final PropertyDescriptor DOC_ID = new PropertyDescriptor
+            .Builder().name("Static Document Id")
+            .description("A static, fixed Couchbase document id.")
+            .required(false)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .build();
+
+    public static final PropertyDescriptor DOC_ID_EXP = new PropertyDescriptor
+            .Builder().name("Document Id Expression")
+            .description("An expression to construct the Couchbase document id."
+                    + " If 'Static Document Id' is specified, then 'Static Document Id' is used.")
+            .required(false)
+            .expressionLanguageSupported(true)
+            .addValidator(StandardValidators.ATTRIBUTE_EXPRESSION_LANGUAGE_VALIDATOR)
+            .build();
+
+
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("All FlowFiles that are written to Couchbase Server are routed to this relationship.")
+            .build();
+    public static final Relationship REL_ORIGINAL = new Relationship.Builder()
+            .name("original")
+            .description("The original input FlowFile is routed to this relationship after it has been successfully processed.")
+            .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+            .name("failure")
+            .description("All FlowFiles that cannot be written to Couchbase Server are routed to this relationship.")
+            .build();
+
+    public static final PropertyDescriptor COUCHBASE_CLUSTER_SERVICE = new PropertyDescriptor
+            .Builder().name("Couchbase Cluster Controller Service")
+            .description("A Couchbase Cluster Controller Service which manages connections to a Couchbase cluster.")
+            .required(true)
+            .identifiesControllerService(CouchbaseClusterControllerService.class)
+            .build();
+
+    public static final PropertyDescriptor BUCKET_NAME = new PropertyDescriptor
+            .Builder().name("Bucket Name")
+            .description("The name of the bucket to access.")
+            .required(true)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .defaultValue("default")
+            .build();
+
+    private List<PropertyDescriptor> descriptors;
+
+    private Set<Relationship> relationships;
+
+    private volatile CouchbaseClusterControllerService clusterService;
+
+    @Override
+    protected final void init(final ProcessorInitializationContext context) {
+
+        final List<PropertyDescriptor> descriptors = new ArrayList<PropertyDescriptor>();
+        descriptors.add(COUCHBASE_CLUSTER_SERVICE);
+        descriptors.add(BUCKET_NAME);
+        addSupportedProperties(descriptors);
+        this.descriptors = Collections.unmodifiableList(descriptors);
+
+        final Set<Relationship> relationships = new HashSet<Relationship>();
+        addSupportedRelationships(relationships);
+        this.relationships = Collections.unmodifiableSet(relationships);
+
+    }
+
+    /**
+     * Add processor specific properties.
+     * @param descriptors add properties to this list
+     */
+    protected void addSupportedProperties(List<PropertyDescriptor> descriptors) {
+        return;
+    }
+
+    /**
+     * Add processor specific relationships.
+     * @param relationships add relationships to this list
+     */
+    protected void addSupportedRelationships(Set<Relationship> relationships) {
+        return;
+    }
+
+    @Override
+    public final Set<Relationship> getRelationships() {
+        return this.relationships;
+    }
+
+    @Override
+    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return descriptors;
+    }
+
+    private CouchbaseClusterControllerService getClusterService(final ProcessContext context) {
+        if(clusterService == null){
+            synchronized(this) {
+                if(clusterService == null){
+                    clusterService = context.getProperty(COUCHBASE_CLUSTER_SERVICE)
+                            .asControllerService(CouchbaseClusterControllerService.class);
+                }
+            }
+        }
+
+        return clusterService;
+    }
+
+    /**
+     * Open a bucket connection using a CouchbaseClusterControllerService.
+     * @param context a process context
+     * @return a bucket instance
+     */
+    protected final Bucket openBucket(final ProcessContext context) {
+        return getClusterService(context).openBucket(context.getProperty(BUCKET_NAME).getValue());
+    }
+
+    /**
+     * Generate a transit url.
+     * @param context a process context
+     * @return a transit url based on the bucket name and the CouchbaseClusterControllerService name
+     */
+    protected String getTransitUrl(final ProcessContext context) {
+        return new StringBuilder(context.getProperty(BUCKET_NAME).getValue())
+            .append('@')
+            .append(context.getProperty(COUCHBASE_CLUSTER_SERVICE).getValue())
+            .toString();
+    }
+
+}
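
For reference, a concrete processor only needs to populate the two hooks and implement onTrigger. A hypothetical pass-through subclass (class name and body invented for illustration):

    import java.util.List;
    import java.util.Set;

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.processor.exception.ProcessException;

    public class PassThroughCouchbaseProcessor extends AbstractCouchbaseProcessor {

        @Override
        protected void addSupportedProperties(final List<PropertyDescriptor> descriptors) {
            descriptors.add(DOC_ID); // appended after the shared cluster-service and bucket-name properties
        }

        @Override
        protected void addSupportedRelationships(final Set<Relationship> relationships) {
            relationships.add(REL_SUCCESS);
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            final FlowFile flowFile = session.get();
            if (flowFile == null) {
                return;
            }
            session.transfer(flowFile, REL_SUCCESS);
        }
    }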

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/DocumentType.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/DocumentType.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/DocumentType.java
new file mode 100644
index 0000000..81dd465
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/DocumentType.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+
+/**
+ * Supported Couchbase document types.
+ *
+ * In order to handle a variety of document classes such as JsonDocument,
+ * JsonLongDocument or JsonStringDocument, Couchbase processors use
+ * RawJsonDocument for the Json type.
+ *
+ * The distinction between Json and Binary exists because BinaryDocument doesn't
+ * set the JSON flag when it is stored on Couchbase Server, even if the content
+ * byte array represents a JSON string, so it can't be retrieved as a JSON document.
+ */
+public enum DocumentType {
+
+    Json,
+    Binary
+
+}
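
For reference, the pitfall described above can be reproduced by writing JSON bytes as a BinaryDocument and reading them back as JSON. A minimal sketch (the open Bucket is assumed, and the exact failure mode is SDK-dependent):

    import java.nio.charset.StandardCharsets;

    import com.couchbase.client.deps.io.netty.buffer.Unpooled;
    import com.couchbase.client.java.Bucket;
    import com.couchbase.client.java.document.BinaryDocument;
    import com.couchbase.client.java.document.RawJsonDocument;

    public class JsonFlagPitfall {

        public static void demonstrate(final Bucket bucket) {
            final byte[] json = "{\"a\":1}".getBytes(StandardCharsets.UTF_8);
            // Stored without the JSON flag, even though the bytes are valid JSON.
            bucket.upsert(BinaryDocument.create("pitfall", Unpooled.copiedBuffer(json)));
            // Reading it back as a JSON document fails because the flag is absent.
            bucket.get("pitfall", RawJsonDocument.class);
        }
    }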

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
new file mode 100644
index 0000000..6d9a476
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
+import org.apache.nifi.annotation.behavior.ReadsAttributes;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.couchbase.CouchbaseAttributes;
+import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.logging.ProcessorLog;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.InputStreamCallback;
+import org.apache.nifi.stream.io.StreamUtils;
+
+import com.couchbase.client.java.Bucket;
+import com.couchbase.client.java.document.BinaryDocument;
+import com.couchbase.client.java.document.Document;
+import com.couchbase.client.java.document.RawJsonDocument;
+
+@Tags({ "nosql", "couchbase", "database", "get" })
+@CapabilityDescription("Get a document from Couchbase Server via Key/Value access.")
+@SeeAlso({CouchbaseClusterControllerService.class})
+@ReadsAttributes({
+    @ReadsAttribute(attribute = "FlowFile content", description = "Used as a document id if neither 'Static Document Id' nor 'Document Id Expression' is specified."),
+    @ReadsAttribute(attribute = "*", description = "Any attribute can be used as part of a document id by 'Document Id Expression'.")
+    })
+@WritesAttributes({
+    @WritesAttribute(attribute="couchbase.cluster", description="Cluster where the document was retrieved from."),
+    @WritesAttribute(attribute="couchbase.bucket", description="Bucket where the document was retrieved from."),
+    @WritesAttribute(attribute="couchbase.doc.id", description="Id of the document."),
+    @WritesAttribute(attribute="couchbase.doc.cas", description="CAS of the document."),
+    @WritesAttribute(attribute="couchbase.doc.expiry", description="Expiration of the document.")
+    })
+public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
+
+    @Override
+    protected void addSupportedProperties(List<PropertyDescriptor> descriptors) {
+        descriptors.add(DOCUMENT_TYPE);
+        descriptors.add(DOC_ID);
+        descriptors.add(DOC_ID_EXP);
+    }
+
+    @Override
+    protected void addSupportedRelationships(Set<Relationship> relationships) {
+        relationships.add(REL_SUCCESS);
+        relationships.add(REL_ORIGINAL);
+        relationships.add(REL_FAILURE);
+    }
+
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
+        final ProcessorLog logger = getLogger();
+        FlowFile inFile = session.get();
+
+        String docId = null;
+        if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
+            docId = context.getProperty(DOC_ID).getValue();
+        } else {
+            // Otherwise docId has to be extracted from inFile.
+            if ( inFile == null ) {
+                return;
+            }
+            if(!StringUtils.isEmpty(context.getProperty(DOC_ID_EXP).getValue())){
+                docId = context.getProperty(DOC_ID_EXP).evaluateAttributeExpressions(inFile).getValue();
+            } else {
+                final byte[] content = new byte[(int) inFile.getSize()];
+                session.read(inFile, new InputStreamCallback() {
+                    @Override
+                    public void process(final InputStream in) throws IOException {
+                        StreamUtils.fillBuffer(in, content, true);
+                    }
+                });
+                docId = new String(content, StandardCharsets.UTF_8);
+            }
+        }
+
+        if(StringUtils.isEmpty(docId)){
+            logger.error("Couldn't get a document id from {}", new Object[]{inFile});
+            if(inFile != null){
+                session.transfer(inFile, REL_FAILURE);
+            }
+            return;
+        }
+
+        try {
+            Document<?> doc = null;
+            byte[] content = null;
+            Bucket bucket = openBucket(context);
+            DocumentType documentType = DocumentType.valueOf(context.getProperty(DOCUMENT_TYPE).getValue());
+            switch (documentType){
+                case Json : {
+                    RawJsonDocument document = bucket.get(docId, RawJsonDocument.class);
+                    if(document != null){
+                        content = document.content().getBytes(StandardCharsets.UTF_8);
+                        doc = document;
+                    }
+                    break;
+                }
+                case Binary : {
+                    BinaryDocument document = bucket.get(docId, BinaryDocument.class);
+                    if(document != null){
+                        content = document.content().array();
+                        doc = document;
+                    }
+                    break;
+                }
+            }
+
+            if(doc == null) {
+                logger.info("Document {} was not found in {}", new Object[]{docId, getTransitUrl(context)});
+                if(inFile != null){
+                    session.transfer(inFile, REL_FAILURE);
+                }
+                return;
+            }
+
+            if(inFile != null){
+                session.transfer(inFile, REL_ORIGINAL);
+            }
+
+            FlowFile outFile = session.create();
+            outFile = session.importFrom(new ByteArrayInputStream(content), outFile);
+            Map<String, String> updatedAttrs = new HashMap<>();
+            updatedAttrs.put(CouchbaseAttributes.Cluster.key(), context.getProperty(COUCHBASE_CLUSTER_SERVICE).getValue());
+            updatedAttrs.put(CouchbaseAttributes.Bucket.key(), context.getProperty(BUCKET_NAME).getValue());
+            updatedAttrs.put(CouchbaseAttributes.DocId.key(), docId);
+            updatedAttrs.put(CouchbaseAttributes.Cas.key(), String.valueOf(doc.cas()));
+            updatedAttrs.put(CouchbaseAttributes.Expiry.key(), String.valueOf(doc.expiry()));
+            outFile = session.putAllAttributes(outFile, updatedAttrs);
+            session.getProvenanceReporter().receive(outFile, getTransitUrl(context));
+            session.transfer(outFile, REL_SUCCESS);
+
+        } catch (Throwable t){
+            logger.error("Getting document {} from Couchbase Server using {} failed due to {}",
+                    new Object[]{docId, inFile, t}, t);
+            if(inFile != null){
+                session.transfer(inFile, REL_FAILURE);
+            }
+        }
+    }
+
+}
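
For reference, a minimal TestRunner flow for this processor; the controller-service wiring sketched after CouchbaseClusterService above is assumed, and a reachable (or stubbed) bucket is required for the get to succeed:

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.util.TestRunner;

    public class GetCouchbaseKeyExample {

        public static void run(final TestRunner runner) {
            // With no static id or expression configured, the FlowFile content is used as the document id.
            runner.enqueue("doc-a".getBytes(StandardCharsets.UTF_8));
            runner.run();
            runner.assertTransferCount(GetCouchbaseKey.REL_SUCCESS, 1);
            runner.assertTransferCount(GetCouchbaseKey.REL_ORIGINAL, 1);
        }
    }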

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
new file mode 100644
index 0000000..6bfa480
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
+import org.apache.nifi.annotation.behavior.ReadsAttributes;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.couchbase.CouchbaseAttributes;
+import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.logging.ProcessorLog;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.InputStreamCallback;
+import org.apache.nifi.stream.io.StreamUtils;
+
+import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
+import com.couchbase.client.deps.io.netty.buffer.Unpooled;
+import com.couchbase.client.java.PersistTo;
+import com.couchbase.client.java.ReplicateTo;
+import com.couchbase.client.java.document.BinaryDocument;
+import com.couchbase.client.java.document.Document;
+import com.couchbase.client.java.document.RawJsonDocument;
+
+@Tags({ "nosql", "couchbase", "database", "put" })
+@CapabilityDescription("Put a document to Couchbase Server via Key/Value access.")
+@SeeAlso({CouchbaseClusterControllerService.class})
+@ReadsAttributes({
+    @ReadsAttribute(attribute = "uuid", description = "Used as a document id if neither 'Static Document Id' nor 'Document Id Expression' is specified."),
+    @ReadsAttribute(attribute = "*", description = "Any attribute can be used as part of a document id by 'Document Id Expression'.")
+    })
+@WritesAttributes({
+    @WritesAttribute(attribute="couchbase.cluster", description="Cluster where the document was stored."),
+    @WritesAttribute(attribute="couchbase.bucket", description="Bucket where the document was stored."),
+    @WritesAttribute(attribute="couchbase.doc.id", description="Id of the document."),
+    @WritesAttribute(attribute="couchbase.doc.cas", description="CAS of the document."),
+    @WritesAttribute(attribute="couchbase.doc.expiry", description="Expiration of the document.")
+    })
+public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
+
+
+    public static final PropertyDescriptor PERSIST_TO = new PropertyDescriptor
+            .Builder().name("Persist To")
+            .description("Durability constraint about disk persistence.")
+            .required(true)
+            .allowableValues(PersistTo.values())
+            .defaultValue(PersistTo.NONE.toString())
+            .build();
+
+    public static final PropertyDescriptor REPLICATE_TO = new PropertyDescriptor
+            .Builder().name("Replicate To")
+            .description("Durability constraint about replication.")
+            .required(true)
+            .allowableValues(ReplicateTo.values())
+            .defaultValue(ReplicateTo.NONE.toString())
+            .build();
+
+    @Override
+    protected void addSupportedProperties(List<PropertyDescriptor> descriptors) {
+        descriptors.add(DOCUMENT_TYPE);
+        descriptors.add(DOC_ID);
+        descriptors.add(DOC_ID_EXP);
+        descriptors.add(PERSIST_TO);
+        descriptors.add(REPLICATE_TO);
+    }
+
+    @Override
+    protected void addSupportedRelationships(Set<Relationship> relationships) {
+        relationships.add(REL_SUCCESS);
+        relationships.add(REL_FAILURE);
+    }
+
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
+        final ProcessorLog logger = getLogger();
+        FlowFile flowFile = session.get();
+        if ( flowFile == null ) {
+            return;
+        }
+
+        try {
+
+            final byte[] content = new byte[(int) flowFile.getSize()];
+            session.read(flowFile, new InputStreamCallback() {
+                @Override
+                public void process(final InputStream in) throws IOException {
+                    StreamUtils.fillBuffer(in, content, true);
+                }
+            });
+
+
+            String docId = String.valueOf(flowFile.getAttribute(CoreAttributes.UUID.key()));
+            if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
+                docId = context.getProperty(DOC_ID).getValue();
+            } else if(!StringUtils.isEmpty(context.getProperty(DOC_ID_EXP).getValue())){
+                docId = context.getProperty(DOC_ID_EXP).evaluateAttributeExpressions(flowFile).getValue();
+            }
+
+            Document<?> doc = null;
+            DocumentType documentType = DocumentType.valueOf(context.getProperty(DOCUMENT_TYPE).getValue());
+            switch (documentType){
+                case Json : {
+                    doc = RawJsonDocument.create(docId, new String(content, StandardCharsets.UTF_8));
+                    break;
+                }
+                case Binary : {
+                    ByteBuf buf = Unpooled.copiedBuffer(content);
+                    doc = BinaryDocument.create(docId, buf);
+                    break;
+                }
+            }
+
+
+            PersistTo persistTo = PersistTo.valueOf(context.getProperty(PERSIST_TO).getValue());
+            ReplicateTo replicateTo = ReplicateTo.valueOf(context.getProperty(REPLICATE_TO).getValue());
+            doc = openBucket(context).upsert(doc, persistTo, replicateTo);
+            Map<String, String> updatedAttrs = new HashMap<>();
+            updatedAttrs.put(CouchbaseAttributes.Cluster.key(), context.getProperty(COUCHBASE_CLUSTER_SERVICE).getValue());
+            updatedAttrs.put(CouchbaseAttributes.Bucket.key(), context.getProperty(BUCKET_NAME).getValue());
+            updatedAttrs.put(CouchbaseAttributes.DocId.key(), docId);
+            updatedAttrs.put(CouchbaseAttributes.Cas.key(), String.valueOf(doc.cas()));
+            updatedAttrs.put(CouchbaseAttributes.Expiry.key(), String.valueOf(doc.expiry()));
+            flowFile = session.putAllAttributes(flowFile, updatedAttrs);
+            session.getProvenanceReporter().send(flowFile, getTransitUrl(context));
+            session.transfer(flowFile, REL_SUCCESS);
+
+        } catch (Throwable t) {
+            logger.error("Writing {} into Couchbase Server failed due to {}", new Object[]{flowFile, t}, t);
+            session.transfer(flowFile, REL_FAILURE);
+        }
+    }
+
+}
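
For reference, the corresponding TestRunner sketch for the put side, with both durability constraints left at NONE (cluster-service wiring as above is assumed):

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.util.TestRunner;

    import com.couchbase.client.java.PersistTo;
    import com.couchbase.client.java.ReplicateTo;

    public class PutCouchbaseKeyExample {

        public static void run(final TestRunner runner) {
            runner.setProperty(PutCouchbaseKey.PERSIST_TO, PersistTo.NONE.toString());
            runner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.NONE.toString());
            // With no id configured, the FlowFile's uuid attribute becomes the document id.
            runner.enqueue("{\"key\":\"value\"}".getBytes(StandardCharsets.UTF_8));
            runner.run();
            runner.assertAllFlowFilesTransferred(PutCouchbaseKey.REL_SUCCESS);
        }
    }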

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
new file mode 100644
index 0000000..e5e3ea7
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
@@ -0,0 +1,15 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+org.apache.nifi.couchbase.CouchbaseClusterService
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/resources/META-INF/services/org.apache.nifi.processor.Processor
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/resources/META-INF/services/org.apache.nifi.processor.Processor b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/resources/META-INF/services/org.apache.nifi.processor.Processor
new file mode 100644
index 0000000..1304435
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/resources/META-INF/services/org.apache.nifi.processor.Processor
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+org.apache.nifi.processors.couchbase.GetCouchbaseKey
+org.apache.nifi.processors.couchbase.PutCouchbaseKey
\ No newline at end of file
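
The two descriptor files above follow the standard java.util.ServiceLoader convention: one fully-qualified implementation class per line. NiFi's NAR framework reads the same descriptors to discover the bundle's extensions; as a plain-Java illustration of the underlying lookup (not NiFi framework code, which performs discovery through its own classloading):

    import java.util.ServiceLoader;
    import org.apache.nifi.processor.Processor;

    public class DiscoverySketch {
        public static void main(String[] args) {
            // instantiates every class listed in META-INF/services/org.apache.nifi.processor.Processor
            ServiceLoader<Processor> processors = ServiceLoader.load(Processor.class);
            for (Processor p : processors) {
                System.out.println(p.getClass().getName()); // GetCouchbaseKey, PutCouchbaseKey
            }
        }
    }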

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java
new file mode 100644
index 0000000..d96b1c2
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
+import org.apache.nifi.couchbase.CouchbaseClusterService;
+import org.apache.nifi.reporting.InitializationException;
+import org.apache.nifi.util.TestRunner;
+import org.apache.nifi.util.TestRunners;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+
+public class TestCouchbaseClusterService {
+
+    private static final String SERVICE_ID = "couchbaseClusterService";
+    private TestRunner testRunner;
+
+    @Before
+    public void init() throws Exception {
+        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
+        System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.PutCouchbaseKey", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.couchbase.CouchbaseClusterService", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.couchbase.TestCouchbaseClusterService", "debug");
+
+        testRunner = TestRunners.newTestRunner(PutCouchbaseKey.class);
+        testRunner.setValidateExpressionUsage(false);
+    }
+
+    @Test
+    public void testConnectionFailure() throws InitializationException {
+        String connectionString = "couchbase://invalid-hostname";
+        CouchbaseClusterControllerService service = new CouchbaseClusterService();
+        testRunner.addControllerService(SERVICE_ID, service);
+        testRunner.setProperty(service, CouchbaseClusterService.CONNECTION_STRING, connectionString);
+        try {
+            testRunner.enableControllerService(service);
+        } catch (AssertionError e) {
+            return; // expected: the unreachable cluster prevents the service from enabling
+        }
+        Assert.fail("The service should not be enabled when it cannot connect to a cluster.");
+    }
+}

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
new file mode 100644
index 0000000..4ea4dff
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.BUCKET_NAME;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.COUCHBASE_CLUSTER_SERVICE;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOCUMENT_TYPE;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID_EXP;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_FAILURE;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_ORIGINAL;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_SUCCESS;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.nifi.couchbase.CouchbaseAttributes;
+import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
+import org.apache.nifi.reporting.InitializationException;
+import org.apache.nifi.util.MockFlowFile;
+import org.apache.nifi.util.TestRunner;
+import org.apache.nifi.util.TestRunners;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.couchbase.client.core.ServiceNotAvailableException;
+import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
+import com.couchbase.client.deps.io.netty.buffer.Unpooled;
+import com.couchbase.client.java.Bucket;
+import com.couchbase.client.java.document.BinaryDocument;
+import com.couchbase.client.java.document.RawJsonDocument;
+
+
+public class TestGetCouchbaseKey {
+
+    private static final String SERVICE_ID = "couchbaseClusterService";
+    private TestRunner testRunner;
+
+    @Before
+    public void init() throws Exception {
+        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
+        System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.GetCouchbaseKey", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.TestGetCouchbaseKey", "debug");
+
+        testRunner = TestRunners.newTestRunner(GetCouchbaseKey.class);
+        testRunner.setValidateExpressionUsage(false);
+    }
+
+    private void setupMockBucket(Bucket bucket) throws InitializationException {
+        CouchbaseClusterControllerService service = mock(CouchbaseClusterControllerService.class);
+        when(service.getIdentifier()).thenReturn(SERVICE_ID);
+        when(service.openBucket(anyString())).thenReturn(bucket);
+        testRunner.addControllerService(SERVICE_ID, service);
+        testRunner.enableControllerService(service);
+        testRunner.setProperty(COUCHBASE_CLUSTER_SERVICE, SERVICE_ID);
+    }
+
+    @Test
+    public void testStaticDocId() throws Exception {
+        String bucketName = "bucket-1";
+        String docId = "doc-a";
+
+        Bucket bucket = mock(Bucket.class);
+        String content = "{\"key\":\"value\"}";
+        int expiry = 100;
+        long cas = 200L;
+        when(bucket.get(docId, RawJsonDocument.class)).thenReturn(RawJsonDocument.create(docId, expiry, content, cas));
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(BUCKET_NAME, bucketName);
+        testRunner.setProperty(DOC_ID, docId);
+        testRunner.run();
+
+        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(content);
+
+        outFile.assertAttributeEquals(CouchbaseAttributes.Cluster.key(), SERVICE_ID);
+        outFile.assertAttributeEquals(CouchbaseAttributes.Bucket.key(), bucketName);
+        outFile.assertAttributeEquals(CouchbaseAttributes.DocId.key(), docId);
+        outFile.assertAttributeEquals(CouchbaseAttributes.Cas.key(), String.valueOf(cas));
+        outFile.assertAttributeEquals(CouchbaseAttributes.Expiry.key(), String.valueOf(expiry));
+    }
+
+
+    /**
+     * The static document id is used even if the doc id expression is set.
+     */
+    @Test
+    public void testStaticDocIdAndDocIdExp() throws Exception {
+        String docId = "doc-a";
+        String docIdExp = "${someProperty}";
+
+        Bucket bucket = mock(Bucket.class);
+        String content = "{\"key\":\"value\"}";
+        when(bucket.get(docId, RawJsonDocument.class)).thenReturn(RawJsonDocument.create(docId, content));
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docId);
+        testRunner.setProperty(DOC_ID_EXP, docIdExp);
+        testRunner.run();
+
+        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(content);
+    }
+
+    @Test
+    public void testDocIdExp() throws Exception {
+        String docIdExp = "${'someProperty'}";
+        String somePropertyValue = "doc-p";
+
+        Bucket bucket = mock(Bucket.class);
+        String content = "{\"key\":\"value\"}";
+        when(bucket.get(somePropertyValue, RawJsonDocument.class))
+            .thenReturn(RawJsonDocument.create(somePropertyValue, content));
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID_EXP, docIdExp);
+
+        byte[] inFileData = "input FlowFile data".getBytes(StandardCharsets.UTF_8);
+        Map<String, String> properties = new HashMap<>();
+        properties.put("someProperty", somePropertyValue);
+        testRunner.enqueue(inFileData, properties);
+        testRunner.run();
+
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_ORIGINAL, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(content);
+    }
+
+    @Test
+    public void testInputFlowFileContent() throws Exception {
+
+        Bucket bucket = mock(Bucket.class);
+        String inFileDataStr = "doc-in";
+        String content = "{\"key\":\"value\"}";
+        when(bucket.get(inFileDataStr, RawJsonDocument.class))
+            .thenReturn(RawJsonDocument.create(inFileDataStr, content));
+        setupMockBucket(bucket);
+
+
+        byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
+        testRunner.run();
+
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_ORIGINAL, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(content);
+    }
+
+    @Test
+    public void testBinaryDocument() throws Exception {
+
+        Bucket bucket = mock(Bucket.class);
+        String inFileDataStr = "doc-in";
+        String content = "binary";
+        ByteBuf buf = Unpooled.copiedBuffer(content.getBytes(StandardCharsets.UTF_8));
+        when(bucket.get(inFileDataStr, BinaryDocument.class))
+            .thenReturn(BinaryDocument.create(inFileDataStr, buf));
+        setupMockBucket(bucket);
+
+
+        byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
+        testRunner.setProperty(DOCUMENT_TYPE, DocumentType.Binary.toString());
+        testRunner.run();
+
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_ORIGINAL, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(content);
+    }
+
+
+    @Test
+    public void testCouchbaseFailure() throws Exception {
+
+        Bucket bucket = mock(Bucket.class);
+        String inFileDataStr = "doc-in";
+        when(bucket.get(inFileDataStr, RawJsonDocument.class))
+            .thenThrow(new ServiceNotAvailableException());
+        setupMockBucket(bucket);
+
+
+        byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
+        testRunner.run();
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 1);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
+        outFile.assertContentEquals(inFileDataStr);
+    }
+}
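
A caveat worth noting alongside testBinaryDocument: in the Couchbase Java SDK, BinaryDocument exposes a reference-counted Netty ByteBuf, and the consumer is expected to release it after reading to avoid leak warnings. A sketch of the read-and-release idiom (assumes an open Bucket as in the tests above):

    import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
    import com.couchbase.client.java.Bucket;
    import com.couchbase.client.java.document.BinaryDocument;

    static byte[] readAndRelease(Bucket bucket, String docId) {
        BinaryDocument doc = bucket.get(docId, BinaryDocument.class);
        ByteBuf buf = doc.content();
        try {
            byte[] bytes = new byte[buf.readableBytes()];
            buf.readBytes(bytes);   // copy the payload out of the buffer
            return bytes;
        } finally {
            buf.release();          // ByteBuf is reference-counted; release when done
        }
    }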

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
new file mode 100644
index 0000000..3995528
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.BUCKET_NAME;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.COUCHBASE_CLUSTER_SERVICE;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID_EXP;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_FAILURE;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_SUCCESS;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.nifi.couchbase.CouchbaseAttributes;
+import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
+import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.reporting.InitializationException;
+import org.apache.nifi.util.MockFlowFile;
+import org.apache.nifi.util.TestRunner;
+import org.apache.nifi.util.TestRunners;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+
+import com.couchbase.client.java.Bucket;
+import com.couchbase.client.java.PersistTo;
+import com.couchbase.client.java.ReplicateTo;
+import com.couchbase.client.java.document.RawJsonDocument;
+import com.couchbase.client.java.error.DurabilityException;
+
+
+public class TestPutCouchbaseKey {
+
+    private static final String SERVICE_ID = "couchbaseClusterService";
+    private TestRunner testRunner;
+
+    @Before
+    public void init() throws Exception {
+        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
+        System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.PutCouchbaseKey", "debug");
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.TestPutCouchbaseKey", "debug");
+
+        testRunner = TestRunners.newTestRunner(PutCouchbaseKey.class);
+        testRunner.setValidateExpressionUsage(false);
+    }
+
+    private void setupMockBucket(Bucket bucket) throws InitializationException {
+        CouchbaseClusterControllerService service = mock(CouchbaseClusterControllerService.class);
+        when(service.getIdentifier()).thenReturn(SERVICE_ID);
+        when(service.openBucket(anyString())).thenReturn(bucket);
+        testRunner.addControllerService(SERVICE_ID, service);
+        testRunner.enableControllerService(service);
+        testRunner.setProperty(COUCHBASE_CLUSTER_SERVICE, SERVICE_ID);
+    }
+
+    @Test
+    public void testStaticDocId() throws Exception {
+        String bucketName = "bucket-1";
+        String docId = "doc-a";
+        int expiry = 100;
+        long cas = 200L;
+
+        String inFileData = "{\"key\":\"value\"}";
+        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
+
+        Bucket bucket = mock(Bucket.class);
+        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
+            .thenReturn(RawJsonDocument.create(docId, expiry, inFileData, cas));
+        setupMockBucket(bucket);
+
+        testRunner.enqueue(inFileDataBytes);
+        testRunner.setProperty(BUCKET_NAME, bucketName);
+        testRunner.setProperty(DOC_ID, docId);
+        testRunner.run();
+
+        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
+
+        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(inFileData);
+        outFile.assertAttributeEquals(CouchbaseAttributes.Cluster.key(), SERVICE_ID);
+        outFile.assertAttributeEquals(CouchbaseAttributes.Bucket.key(), bucketName);
+        outFile.assertAttributeEquals(CouchbaseAttributes.DocId.key(), docId);
+        outFile.assertAttributeEquals(CouchbaseAttributes.Cas.key(), String.valueOf(cas));
+        outFile.assertAttributeEquals(CouchbaseAttributes.Expiry.key(), String.valueOf(expiry));
+    }
+
+    @Test
+    public void testDurabilityConstraint() throws Exception {
+        String docId = "doc-a";
+
+        String inFileData = "{\"key\":\"value\"}";
+        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
+
+        Bucket bucket = mock(Bucket.class);
+        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.MASTER), eq(ReplicateTo.ONE)))
+            .thenReturn(RawJsonDocument.create(docId, inFileData));
+        setupMockBucket(bucket);
+
+        testRunner.enqueue(inFileDataBytes);
+        testRunner.setProperty(DOC_ID, docId);
+        testRunner.setProperty(PutCouchbaseKey.PERSIST_TO, PersistTo.MASTER.toString());
+        testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
+        testRunner.run();
+
+        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.MASTER), eq(ReplicateTo.ONE));
+
+        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(inFileData);
+    }
+
+    /**
+     * The static document id is used even if the doc id expression is set.
+     */
+    @Test
+    public void testStaticDocIdAndDocIdExp() throws Exception {
+        String docId = "doc-a";
+        String docIdExp = "${someProperty}";
+
+        String inFileData = "{\"key\":\"value\"}";
+        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
+
+        Bucket bucket = mock(Bucket.class);
+        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
+            .thenReturn(RawJsonDocument.create(docId, inFileData));
+        setupMockBucket(bucket);
+
+        testRunner.enqueue(inFileDataBytes);
+        testRunner.setProperty(DOC_ID, docId);
+        testRunner.setProperty(DOC_ID_EXP, docIdExp);
+        testRunner.run();
+
+        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
+
+        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(inFileData);
+    }
+
+    @Test
+    public void testDocIdExp() throws Exception {
+        String docIdExp = "${'someProperty'}";
+        String somePropertyValue = "doc-p";
+
+        String inFileData = "{\"key\":\"value\"}";
+        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
+
+        Bucket bucket = mock(Bucket.class);
+        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
+            .thenReturn(RawJsonDocument.create(somePropertyValue, inFileData));
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID_EXP, docIdExp);
+
+        Map<String, String> properties = new HashMap<>();
+        properties.put("someProperty", somePropertyValue);
+        testRunner.enqueue(inFileDataBytes, properties);
+        testRunner.run();
+
+        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
+
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(inFileData);
+    }
+
+    @Test
+    public void testInputFlowFileUuid() throws Exception {
+
+        String uuid = "00029362-5106-40e8-b8a9-bf2cecfbc0d7";
+        String inFileData = "{\"key\":\"value\"}";
+        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
+
+        Bucket bucket = mock(Bucket.class);
+        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
+            .thenReturn(RawJsonDocument.create(uuid, inFileData));
+        setupMockBucket(bucket);
+
+        Map<String, String> properties = new HashMap<>();
+        properties.put(CoreAttributes.UUID.key(), uuid);
+        testRunner.enqueue(inFileDataBytes, properties);
+        testRunner.run();
+
+        ArgumentCaptor<RawJsonDocument> capture = ArgumentCaptor.forClass(RawJsonDocument.class);
+        verify(bucket, times(1)).upsert(capture.capture(), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
+        assertEquals(uuid, capture.getValue().id());
+
+        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
+        outFile.assertContentEquals(inFileData);
+    }
+
+
+    @Test
+    public void testCouchbaseFailure() throws Exception {
+
+        String docId = "doc-a";
+
+        String inFileData = "{\"key\":\"value\"}";
+        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
+
+        Bucket bucket = mock(Bucket.class);
+        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE)))
+            .thenThrow(new DurabilityException());
+        setupMockBucket(bucket);
+
+        testRunner.enqueue(inFileDataBytes);
+        testRunner.setProperty(DOC_ID, docId);
+        testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
+        testRunner.run();
+
+        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE));
+
+        testRunner.assertAllFlowFilesTransferred(REL_FAILURE);
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 1);
+        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
+        outFile.assertContentEquals(inFileData);
+    }
+}
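
Taken together, testStaticDocIdAndDocIdExp, testDocIdExp and testInputFlowFileUuid pin down the document-id resolution order: an explicit Document Id property wins, then the Document Id expression, then the FlowFile's UUID. Restated as a standalone helper that paraphrases the PutCouchbaseKey logic shown earlier (not the shipped code; DOC_ID and DOC_ID_EXP are the processor's property descriptors, statically imported as in the tests):

    import org.apache.commons.lang3.StringUtils;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.flowfile.attributes.CoreAttributes;
    import org.apache.nifi.processor.ProcessContext;

    static String resolveDocId(ProcessContext context, FlowFile flowFile) {
        final String staticId = context.getProperty(DOC_ID).getValue();
        if (!StringUtils.isEmpty(staticId)) {
            return staticId;                                             // 1. static Document Id
        }
        final String exp = context.getProperty(DOC_ID_EXP).getValue();
        if (!StringUtils.isEmpty(exp)) {
            return context.getProperty(DOC_ID_EXP)
                    .evaluateAttributeExpressions(flowFile).getValue();  // 2. expression
        }
        return flowFile.getAttribute(CoreAttributes.UUID.key());         // 3. FlowFile UUID
    }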

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/nifi-couchbase-bundle/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/pom.xml b/nifi-nar-bundles/nifi-couchbase-bundle/pom.xml
new file mode 100644
index 0000000..3654295
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/pom.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.nifi</groupId>
+        <artifactId>nifi-nar-bundles</artifactId>
+        <version>0.3.1-SNAPSHOT</version>
+    </parent>
+
+    <groupId>org.apache.nifi</groupId>
+    <artifactId>nifi-couchbase-bundle</artifactId>
+    <version>0.3.1-SNAPSHOT</version>
+    <packaging>pom</packaging>
+
+    <modules>
+        <module>nifi-couchbase-processors</module>
+        <module>nifi-couchbase-nar</module>
+    </modules>
+
+</project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/nifi-nar-bundles/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/pom.xml b/nifi-nar-bundles/pom.xml
index d51c9b6..841818a 100644
--- a/nifi-nar-bundles/pom.xml
+++ b/nifi-nar-bundles/pom.xml
@@ -45,6 +45,7 @@
         <module>nifi-ambari-bundle</module>
         <module>nifi-image-bundle</module>
         <module>nifi-avro-bundle</module>
+        <module>nifi-couchbase-bundle</module>
     </modules>
     <dependencyManagement>
         <dependencies>

http://git-wip-us.apache.org/repos/asf/nifi/blob/2466a245/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7f68c32..1d5a857 100644
--- a/pom.xml
+++ b/pom.xml
@@ -908,6 +908,12 @@
             </dependency>
             <dependency>
                 <groupId>org.apache.nifi</groupId>
+                <artifactId>nifi-couchbase-nar</artifactId>
+                <version>0.3.1-SNAPSHOT</version>
+                <type>nar</type>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.nifi</groupId>
                 <artifactId>nifi-properties</artifactId>
                 <version>0.3.1-SNAPSHOT</version>
             </dependency>


[13/17] nifi git commit: NIFI-810: - Adding basic support for preventing connections to processors that cannot accept input. - Updating validation when connecting and disconnecting processors.

Posted by ma...@apache.org.
NIFI-810:
- Adding basic support for preventing connections to processors that cannot accept input.
- Updating validation when connecting and disconnecting processors.


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/13edcfda
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/13edcfda
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/13edcfda

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 13edcfda2ef830e1b160b31f7cd3bea874ccd3f0
Parents: 4afd8f8
Author: Matt Gilman <ma...@gmail.com>
Authored: Fri Sep 25 17:46:58 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:26:14 2015 -0400

----------------------------------------------------------------------
 .../org/apache/nifi/web/api/dto/ProcessorDTO.java    | 15 +++++++++++++++
 .../org/apache/nifi/controller/TemplateManager.java  |  1 +
 .../java/org/apache/nifi/web/api/dto/DtoFactory.java |  1 +
 .../src/main/webapp/js/nf/canvas/nf-actions.js       |  4 +++-
 .../src/main/webapp/js/nf/canvas/nf-canvas-utils.js  | 13 ++++++++++---
 .../js/nf/canvas/nf-connection-configuration.js      |  8 ++++++--
 6 files changed, 36 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
index c65c46a..866d77c 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/ProcessorDTO.java
@@ -36,6 +36,7 @@ public class ProcessorDTO extends NiFiComponentDTO {
     private String description;
     private Boolean supportsParallelProcessing;
     private Boolean supportsEventDriven;
+    private String inputRequirement;
 
     private ProcessorConfigDTO config;
 
@@ -121,6 +122,20 @@ public class ProcessorDTO extends NiFiComponentDTO {
     }
 
     /**
+     * @return the input requirement of this processor
+     */
+    @ApiModelProperty(
+            value = "The input requirement for this processor."
+    )
+    public String getInputRequirement() {
+        return inputRequirement;
+    }
+
+    public void setInputRequirement(String inputRequirement) {
+        this.inputRequirement = inputRequirement;
+    }
+
+    /**
      * @return whether this processor supports event driven scheduling
      */
     @ApiModelProperty(
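
The new inputRequirement string originates from the @InputRequirement annotation introduced for processors (see commit 07/17 below); DtoFactory populates it from node.getInputRequirement().name(). Declared on a processor it looks like this (a representative example, not a file from this commit):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.processor.AbstractProcessor;

    // INPUT_REQUIRED, INPUT_ALLOWED and INPUT_FORBIDDEN are the enum constants;
    // the DTO carries the constant's name as a plain string for the UI to inspect.
    @InputRequirement(Requirement.INPUT_REQUIRED)
    public abstract class ExampleProcessor extends AbstractProcessor {
    }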

http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
index 7b8e173..a332e05 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/TemplateManager.java
@@ -320,6 +320,7 @@ public class TemplateManager {
 
             // remove validation errors
             processorDTO.setValidationErrors(null);
+            processorDTO.setInputRequirement(null);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
index 76bce6f..16b114e 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
@@ -1402,6 +1402,7 @@ public final class DtoFactory {
         dto.setPosition(createPositionDto(node.getPosition()));
         dto.setStyle(node.getStyle());
         dto.setParentGroupId(node.getProcessGroup().getIdentifier());
+        dto.setInputRequirement(node.getInputRequirement().name());
 
         dto.setType(node.getProcessor().getClass().getCanonicalName());
         dto.setName(node.getName());

http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
index 3b47a8d..c6ef75f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-actions.js
@@ -737,7 +737,9 @@ nf.Actions = (function () {
                             var destinationData = destination.datum();
 
                             // update the destination component accordingly
-                            if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                            if (nf.CanvasUtils.isProcessor(destination)) {
+                                nf.Processor.reload(destinationData.component);
+                            } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                                 nf.RemoteProcessGroup.reload(destinationData.component);
                             }
                         } else {

http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
index 9f56e30..1be551f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-canvas-utils.js
@@ -1371,9 +1371,16 @@ nf.CanvasUtils = (function () {
                 return false;
             }
 
-            return nf.CanvasUtils.isProcessor(selection) || nf.CanvasUtils.isProcessGroup(selection) ||
-                    nf.CanvasUtils.isRemoteProcessGroup(selection) || nf.CanvasUtils.isOutputPort(selection) ||
-                    nf.CanvasUtils.isFunnel(selection);
+            if (nf.CanvasUtils.isProcessGroup(selection) || nf.CanvasUtils.isRemoteProcessGroup(selection) ||
+                    nf.CanvasUtils.isOutputPort(selection) || nf.CanvasUtils.isFunnel(selection)) {
+                return true;
+            }
+
+            // if processor, ensure it supports input
+            if (nf.CanvasUtils.isProcessor(selection)) {
+                var destinationData = selection.datum();
+                return destinationData.component.inputRequirement !== 'INPUT_FORBIDDEN';
+            }
         }
     };
 }());
\ No newline at end of file
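
The canvas rule above reduces to: any process group, remote process group, output port or funnel can be a connection destination, and a processor can be one unless it is annotated INPUT_FORBIDDEN. The same predicate expressed against the annotation's enum (a sketch, not framework code):

    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

    static boolean canReceiveInput(Requirement requirement) {
        // mirrors the nf-canvas-utils.js check: only INPUT_FORBIDDEN blocks a connection
        return requirement != Requirement.INPUT_FORBIDDEN;
    }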

http://git-wip-us.apache.org/repos/asf/nifi/blob/13edcfda/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
index cc246cf..1bafa7d 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-ui/src/main/webapp/js/nf/canvas/nf-connection-configuration.js
@@ -870,7 +870,9 @@ nf.ConnectionConfiguration = (function () {
                 }
 
                 // update the destination component accordingly
-                if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                if (nf.CanvasUtils.isProcessor(destination)) {
+                    nf.Processor.reload(destinationData.component);
+                } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                     nf.RemoteProcessGroup.reload(destinationData.component);
                 }
 
@@ -958,7 +960,9 @@ nf.ConnectionConfiguration = (function () {
                     }
 
                     // update the destination component accordingly
-                    if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
+                    if (nf.CanvasUtils.isProcessor(destination)) {
+                        nf.Processor.reload(destinationData.component);
+                    } else if (nf.CanvasUtils.isRemoteProcessGroup(destination)) {
                         nf.RemoteProcessGroup.reload(destinationData.component);
                     }
                 }


[07/17] nifi git commit: NIFI-810: Created RequiresInput annotation and ensured that processors are invalid if connections do not agree

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
index 68155d1..98a56bf 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
@@ -16,33 +16,7 @@
  */
 package org.apache.nifi.processors.standard;
 
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.ProcessorInitializationContext;
-import org.apache.nifi.processor.DataUnit;
-import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.Relationship;
 import java.io.BufferedWriter;
-
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.Validator;
-import org.apache.nifi.expression.AttributeValueDecorator;
-import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
-import org.apache.nifi.logging.ProcessorLog;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.io.OutputStreamCallback;
-import org.apache.nifi.processor.io.StreamCallback;
-import org.apache.nifi.processor.util.FlowFileFilters;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.processors.standard.util.NLKBufferedReader;
-import org.apache.nifi.util.StopWatch;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -58,9 +32,37 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.Validator;
+import org.apache.nifi.expression.AttributeValueDecorator;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.logging.ProcessorLog;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.DataUnit;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.OutputStreamCallback;
+import org.apache.nifi.processor.io.StreamCallback;
+import org.apache.nifi.processor.util.FlowFileFilters;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.standard.util.NLKBufferedReader;
+import org.apache.nifi.stream.io.StreamUtils;
+import org.apache.nifi.util.StopWatch;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Text", "Regular Expression", "Update", "Change", "Replace", "Modify", "Regex"})
 @CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of "
         + "the content that matches the Regular Expression with some alternate value.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
index 04a9c56..f68ac6c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java
@@ -39,12 +39,19 @@ import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.expression.AttributeValueDecorator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
@@ -52,21 +59,16 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.StopWatch;
 
-import org.apache.commons.lang3.StringUtils;
-
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Text", "Regular Expression", "Update", "Change", "Replace", "Modify", "Regex", "Mapping"})
 @CapabilityDescription("Updates the content of a FlowFile by evaluating a Regular Expression against it and replacing the section of the content that "
         + "matches the Regular Expression with some alternate value provided in a mapping file.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
index 7055a8a..d681793 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java
@@ -29,6 +29,8 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.DynamicRelationship;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -59,6 +61,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"attributes", "routing", "Attribute Expression Language", "regexp", "regex", "Regular Expression", "Expression Language"})
 @CapabilityDescription("Routes FlowFiles based on their Attributes using the Attribute Expression Language")
 @DynamicProperty(name = "Relationship Name", value = "Attribute Expression Language", supportsExpressionLanguage = true, description = "Routes FlowFiles whose "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
index 937bc69..c63839c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java
@@ -29,10 +29,18 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.DynamicRelationship;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.expression.AttributeValueDecorator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
@@ -40,20 +48,15 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.DynamicRelationship;
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.IntegerHolder;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"route", "content", "regex", "regular expression", "regexp"})
 @CapabilityDescription("Applies Regular Expressions to the content of a FlowFile and routes a copy of the FlowFile to each "
         + "destination whose Regular Expression matches. Regular Expressions are added as User-Defined Properties where the name "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
index 1f0fc7b..aa88827 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java
@@ -32,28 +32,31 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.util.file.monitor.LastModifiedMonitor;
-import org.apache.nifi.util.file.monitor.SynchronousFileWatcher;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.util.file.monitor.LastModifiedMonitor;
+import org.apache.nifi.util.file.monitor.SynchronousFileWatcher;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"scan", "attributes", "search", "lookup"})
 @CapabilityDescription("Scans the specified attributes of FlowFiles, checking to see if any of their values are "
         + "present within the specified dictionary of terms")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
index 445249b..6fe8446 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java
@@ -35,11 +35,13 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -63,6 +65,7 @@ import org.apache.nifi.util.search.ahocorasick.SearchState;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"aho-corasick", "scan", "content", "byte sequence", "search", "find", "dictionary"})
 @CapabilityDescription("Scans the content of FlowFiles for terms that are found in a user-supplied dictionary. If a term is matched, the UTF-8 "
         + "encoded version of the term will be added to the FlowFile using the 'matching.term' attribute")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
index e5e90ea..7b1103f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java
@@ -26,13 +26,15 @@ import java.util.Set;
 import java.util.UUID;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -48,6 +50,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @SideEffectFree
 @SupportsBatching
 @Tags({"segment", "split"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Segments a FlowFile into multiple smaller segments on byte boundaries. Each segment is given the following attributes: "
         + "fragment.identifier, fragment.index, fragment.count, segment.original.filename; these attributes can then be used by the "
         + "MergeContent processor in order to reconstitute the original FlowFile")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
index 3da1bd5..3cdf787 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java
@@ -33,14 +33,16 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
@@ -64,6 +66,7 @@ import org.apache.nifi.util.Tuple;
 @SideEffectFree
 @SupportsBatching
 @Tags({"content", "split", "binary"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits incoming FlowFiles by a specified byte sequence")
 @WritesAttributes({
     @WritesAttribute(attribute = "fragment.identifier", description = "All split FlowFiles produced from the same parent FlowFile will have the same randomly generated UUID added for this attribute"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
index a3a4ed8..dfd09a2 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java
@@ -16,12 +16,21 @@
  */
 package org.apache.nifi.processors.standard;
 
-import com.jayway.jsonpath.DocumentContext;
-import com.jayway.jsonpath.InvalidJsonException;
-import com.jayway.jsonpath.JsonPath;
-import com.jayway.jsonpath.PathNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -38,21 +47,16 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
+import com.jayway.jsonpath.DocumentContext;
+import com.jayway.jsonpath.InvalidJsonException;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.PathNotFoundException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"json", "split", "jsonpath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a JSON File into multiple, separate FlowFiles for an array element specified by a JsonPath expression. "
         + "Each generated FlowFile is comprised of an element of the specified array and transferred to relationship 'split,' "
         + "with the original file transferred to the 'original' relationship. If the specified JsonPath is not found or "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
index 56bd729..e966880 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java
@@ -16,50 +16,53 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
-import org.apache.nifi.stream.io.BufferedInputStream;
-import org.apache.nifi.stream.io.BufferedOutputStream;
-import org.apache.nifi.stream.io.ByteArrayOutputStream;
-import org.apache.nifi.stream.io.ByteCountingInputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.BufferedInputStream;
+import org.apache.nifi.stream.io.BufferedOutputStream;
+import org.apache.nifi.stream.io.ByteArrayOutputStream;
+import org.apache.nifi.stream.io.ByteCountingInputStream;
 import org.apache.nifi.util.IntegerHolder;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.UUID;
-
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"split", "text"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a text file into multiple smaller text files on line boundaries, each having up to a configured number of lines")
 @WritesAttributes({
     @WritesAttribute(attribute = "text.line.count", description = "The number of lines of text from the original FlowFile that were copied to this FlowFile"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
index 617fcbe..a8453bb 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
@@ -29,27 +29,28 @@ import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
 
+import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.BufferedInputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.XmlElementNotifier;
+import org.apache.nifi.stream.io.BufferedInputStream;
 import org.apache.nifi.util.BooleanHolder;
-
-import org.apache.commons.lang3.StringEscapeUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.Attributes;
@@ -63,6 +64,7 @@ import org.xml.sax.XMLReader;
 @SideEffectFree
 @SupportsBatching
 @Tags({"xml", "split"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits an XML File into multiple separate FlowFiles, each comprising a child or descendant of the original root element")
 public class SplitXml extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
index fc4730c..e77dfc6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java
@@ -35,6 +35,8 @@ import javax.xml.transform.stream.StreamSource;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -62,6 +64,7 @@ import org.apache.nifi.util.Tuple;
 @SideEffectFree
 @SupportsBatching
 @Tags({"xml", "xslt", "transform"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Applies the provided XSLT file to the flowfile XML payload. A new FlowFile is created "
         + "with transformed content and is routed to the 'success' relationship. If the XSL transform "
         + "fails, the original FlowFile is routed to the 'failure' relationship")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
index ff4d936..e94853b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java
@@ -35,14 +35,16 @@ import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -67,6 +69,7 @@ import org.apache.nifi.util.ObjectHolder;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Unpack", "un-merge", "tar", "zip", "archive", "flowfile-stream", "flowfile-stream-v3"})
 @CapabilityDescription("Unpacks the content of FlowFiles that have been packaged with one of several different Packaging Formats, emitting one to many "
         + "FlowFiles for each input FlowFile")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
index d505898..3693590 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java
@@ -31,6 +31,14 @@ import javax.xml.validation.Schema;
 import javax.xml.validation.SchemaFactory;
 import javax.xml.validation.Validator;
 
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -39,21 +47,15 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.BooleanHolder;
-
 import org.xml.sax.SAXException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"xml", "schema", "validation", "xsd"})
 @CapabilityDescription("Validates the contents of FlowFiles against a user-specified XML Schema file")
 public class ValidateXml extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
index dd81289..8cf5726 100644
--- a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
+++ b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
@@ -31,9 +31,13 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -57,11 +61,9 @@ import org.apache.nifi.search.Searchable;
 import org.apache.nifi.update.attributes.Action;
 import org.apache.nifi.update.attributes.Condition;
 import org.apache.nifi.update.attributes.Criteria;
-import org.apache.nifi.update.attributes.Rule;
 import org.apache.nifi.update.attributes.FlowFilePolicy;
+import org.apache.nifi.update.attributes.Rule;
 import org.apache.nifi.update.attributes.serde.CriteriaSerDe;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 
 /**
  * This processor supports updating flowfile attributes and can do so
@@ -116,6 +118,7 @@ import org.apache.nifi.annotation.behavior.WritesAttribute;
  */
 @EventDriven
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"attributes", "modification", "update", "delete", "Attribute Expression Language"})
 @CapabilityDescription("Updates the Attributes for a FlowFile by using the Attribute Expression Language and/or deletes the attributes based on a regular expression")
 @DynamicProperty(name = "A FlowFile attribute to update", value = "The value to set it to", supportsExpressionLanguage = true,


[08/17] nifi git commit: NIFI-810: Created InputRequirement annotation and ensured that processors are marked invalid if their connections do not agree

Posted by ma...@apache.org.
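
Every hunk in this message makes the same two edits: the imports are collapsed into a single sorted block, and a new @InputRequirement annotation declares whether the processor expects incoming FlowFiles. As a minimal sketch of the resulting pattern — the class name, tag, and relationship below are illustrative and not taken from the patch itself:

    import java.util.Collections;
    import java.util.Set;

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.annotation.documentation.CapabilityDescription;
    import org.apache.nifi.annotation.documentation.Tags;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.processor.exception.ProcessException;

    // INPUT_REQUIRED declares that this processor only makes sense with
    // an incoming connection feeding it FlowFiles.
    @InputRequirement(Requirement.INPUT_REQUIRED)
    @Tags({"example"})
    @CapabilityDescription("Hypothetical pass-through used only to illustrate the annotation")
    public class ExamplePassThrough extends AbstractProcessor {

        static final Relationship REL_SUCCESS = new Relationship.Builder()
                .name("success")
                .description("All FlowFiles are routed here")
                .build();

        @Override
        public Set<Relationship> getRelationships() {
            return Collections.singleton(REL_SUCCESS);
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            // session.get() may still return null if the processor is
            // triggered with no work queued.
            final FlowFile flowFile = session.get();
            if (flowFile == null) {
                return;
            }
            session.transfer(flowFile, REL_SUCCESS);
        }
    }

Per the commit subject, the intent is that the framework can mark a processor invalid when its connections contradict the declared requirement — for example, an INPUT_REQUIRED processor with no incoming connection.
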
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
index b825972..39dc725 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java
@@ -28,11 +28,13 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.distributed.cache.client.Deserializer;
 import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
@@ -52,6 +54,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SupportsBatching
 @Tags({"hash", "dupe", "duplicate", "dedupe"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Caches a value, computed from FlowFile attributes, for each incoming FlowFile and determines if the cached value has already been seen. "
         + "If so, routes the FlowFile to 'duplicate' with an attribute named 'original.identifier' that specifies the original FlowFile's"
         + "\"description\", which is specified in the <FlowFile Description> property. If the FlowFile is not determined to be a duplicate, the Processor "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
index afff3c4..73ada84 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java
@@ -32,9 +32,11 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.behavior.DynamicRelationship;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -57,6 +59,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @TriggerWhenAnyDestinationAvailable
 @Tags({"distribute", "load balance", "route", "round robin", "weighted"})
 @CapabilityDescription("Distributes FlowFiles to downstream processors based on a Distribution Strategy. If using the Round Robin "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
index 7400821..021a94f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java
@@ -21,7 +21,9 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -36,6 +38,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @EventDriven
 @SupportsBatching
 @Tags({"test", "load", "duplicate"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Intended for load testing, this processor will create the configured number of copies of each incoming FlowFile")
 public class DuplicateFlowFile extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
index 67c2214..de81fe5 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java
@@ -26,13 +26,20 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Base32InputStream;
 import org.apache.commons.codec.binary.Base32OutputStream;
-
 import org.apache.commons.codec.binary.Base64InputStream;
 import org.apache.commons.codec.binary.Base64OutputStream;
 import org.apache.commons.codec.binary.Hex;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -41,11 +48,6 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processors.standard.util.ValidatingBase32InputStream;
 import org.apache.nifi.processors.standard.util.ValidatingBase64InputStream;
@@ -55,6 +57,7 @@ import org.apache.nifi.util.StopWatch;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"encode", "decode", "base64", "hex"})
 @CapabilityDescription("Encodes the FlowFile content in base64")
 public class EncodeContent extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
index 6492d0a..7b98189 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncryptContent.java
@@ -27,6 +27,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -55,6 +57,7 @@ import org.bouncycastle.jce.provider.BouncyCastleProvider;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"encryption", "decryption", "password", "JCE", "OpenPGP", "PGP", "GPG"})
 @CapabilityDescription("Encrypts or Decrypts a FlowFile using either symmetric encryption with a password and randomly generated salt, or asymmetric encryption using a public and secret key.")
 public class EncryptContent extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
index ad3120c..db60f13 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
@@ -16,13 +16,25 @@
  */
 package org.apache.nifi.processors.standard;
 
-import com.jayway.jsonpath.DocumentContext;
-import com.jayway.jsonpath.InvalidJsonException;
-import com.jayway.jsonpath.JsonPath;
-import com.jayway.jsonpath.PathNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -42,24 +54,16 @@ import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
+import com.jayway.jsonpath.DocumentContext;
+import com.jayway.jsonpath.InvalidJsonException;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.PathNotFoundException;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"JSON", "evaluate", "JsonPath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Evaluates one or more JsonPath expressions against the content of a FlowFile. "
         + "The results of those expressions are assigned to FlowFile Attributes or are written to the content of the FlowFile itself, "
         + "depending on configuration of the Processor. "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
index 80b1795..6b3c514 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java
@@ -49,40 +49,43 @@ import javax.xml.xpath.XPathExpressionException;
 import javax.xml.xpath.XPathFactory;
 import javax.xml.xpath.XPathFactoryConfigurationException;
 
-import net.sf.saxon.lib.NamespaceConstant;
-import net.sf.saxon.xpath.XPathEvaluator;
-
+import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.BufferedInputStream;
-import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.io.OutputStreamCallback;
+import org.apache.nifi.stream.io.BufferedInputStream;
+import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 import org.xml.sax.InputSource;
 
+import net.sf.saxon.lib.NamespaceConstant;
+import net.sf.saxon.xpath.XPathEvaluator;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"XML", "evaluate", "XPath"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Evaluates one or more XPaths against the content of a FlowFile. The results of those XPaths are assigned to "
         + "FlowFile Attributes or are written to the content of the FlowFile itself, depending on configuration of the "
         + "Processor. XPaths are entered by adding user-defined properties; the name of the property maps to the Attribute "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
index 3291b55..f8db8f8 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java
@@ -40,23 +40,15 @@ import javax.xml.transform.TransformerFactoryConfigurationError;
 import javax.xml.transform.sax.SAXSource;
 import javax.xml.transform.stream.StreamResult;
 
-import net.sf.saxon.s9api.DOMDestination;
-import net.sf.saxon.s9api.Processor;
-import net.sf.saxon.s9api.SaxonApiException;
-import net.sf.saxon.s9api.XQueryCompiler;
-import net.sf.saxon.s9api.XQueryEvaluator;
-import net.sf.saxon.s9api.XQueryExecutable;
-import net.sf.saxon.s9api.XdmItem;
-import net.sf.saxon.s9api.XdmNode;
-import net.sf.saxon.s9api.XdmValue;
-
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -78,10 +70,21 @@ import org.apache.nifi.util.ObjectHolder;
 import org.w3c.dom.Document;
 import org.xml.sax.InputSource;
 
+import net.sf.saxon.s9api.DOMDestination;
+import net.sf.saxon.s9api.Processor;
+import net.sf.saxon.s9api.SaxonApiException;
+import net.sf.saxon.s9api.XQueryCompiler;
+import net.sf.saxon.s9api.XQueryEvaluator;
+import net.sf.saxon.s9api.XQueryExecutable;
+import net.sf.saxon.s9api.XdmItem;
+import net.sf.saxon.s9api.XdmNode;
+import net.sf.saxon.s9api.XdmValue;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"XML", "evaluate", "XPath", "XQuery"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription(
         "Evaluates one or more XQueries against the content of a FlowFile.  The results of those XQueries are assigned "
         + "to FlowFile Attributes or are written to the content of the FlowFile itself, depending on configuration of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
index c8a67a0..fd6bb05 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java
@@ -43,6 +43,8 @@ import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -60,6 +62,7 @@ import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.ArgumentUtils;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"command", "process", "source", "external", "invoke", "script"})
 @CapabilityDescription("Runs an operating system command specified by the user and writes the output of that command to a FlowFile. If the command is expected "
         + "to be long-running, the Processor can output the partial data on a specified interval. When this option is used, the output is expected to be in textual "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
index 45fd1a8..5e25bdd 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java
@@ -30,6 +30,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -48,6 +50,7 @@ import org.apache.nifi.util.LongHolder;
 import org.apache.nifi.util.StopWatch;
 
 @EventDriven
+@InputRequirement(Requirement.INPUT_ALLOWED)
 @Tags({ "sql", "select", "jdbc", "query", "database" })
 @CapabilityDescription("Execute provided SQL select query. Query result will be converted to Avro format."
     + " Streaming is used so arbitrarily large result sets are supported. This processor can be scheduled to run on " +

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
index 633ce61..9bea6ba 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java
@@ -35,11 +35,13 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -119,6 +121,7 @@ import org.apache.nifi.stream.io.StreamUtils;
  */
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"command execution", "command", "stream", "execute"})
 @CapabilityDescription("Executes an external command on the contents of a flow file, and creates a new flow file with the results of the command.")
 @DynamicProperty(name = "An environment variable name", value = "An environment variable value", description = "These environment variables are passed to the process spawned by this Processor")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
index 29b9c20..9583b8e 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java
@@ -34,6 +34,8 @@ import java.util.regex.Pattern;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -56,6 +58,7 @@ import org.apache.nifi.stream.io.StreamUtils;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"evaluate", "extract", "Text", "Regular Expression", "regex"})
 @CapabilityDescription(
         "Evaluates one or more Regular Expressions against the content of a FlowFile.  "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
index aa1206a..4feee1b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
@@ -26,6 +26,12 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.processor.AbstractProcessor;
@@ -34,15 +40,12 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
 @SupportsBatching
 @Tags({"test", "random", "generate"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("This processor creates FlowFiles of random data and is used for load testing")
 public class GenerateFlowFile extends AbstractProcessor {
 

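GenerateFlowFile above is the first INPUT_FORBIDDEN case: source processors create FlowFiles rather than consume them, and this value lets the framework reject incoming connections outright. A minimal sketch of the create-and-write pattern such sources use (the payload is made up; REL_SUCCESS is assumed defined as in the earlier sketch, and the anonymous class keeps it compatible with the Java 7 sources in this diff):

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.create(); // no session.get(): input is forbidden
        flowFile = session.write(flowFile, new OutputStreamCallback() {
            @Override
            public void process(final OutputStream out) throws IOException {
                out.write("example payload".getBytes(StandardCharsets.UTF_8));
            }
        });
        session.transfer(flowFile, REL_SUCCESS);
    }

(OutputStreamCallback is org.apache.nifi.processor.io.OutputStreamCallback; java.io.IOException, java.io.OutputStream, and java.nio.charset.StandardCharsets are also needed.)
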
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
index ff5b599..7c78faa 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java
@@ -20,17 +20,20 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessorInitializationContext;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
 import org.apache.nifi.processors.standard.util.FileTransfer;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"FTP", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
 @CapabilityDescription("Fetches files from an FTP Server and creates FlowFiles from them")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
index 0fa9178..ced79cd 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java
@@ -49,12 +49,14 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
@@ -70,6 +72,7 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"local", "files", "filesystem", "ingest", "ingress", "get", "source", "input"})
 @CapabilityDescription("Creates FlowFiles from files in a directory.  NiFi will ignore files it doesn't have at least read permissions for.")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
index 7099552..48ca2de 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetHTTP.java
@@ -69,6 +69,8 @@ import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -94,6 +96,7 @@ import org.apache.nifi.ssl.SSLContextService.ClientAuth;
 import org.apache.nifi.util.StopWatch;
 
 @Tags({"get", "fetch", "poll", "http", "https", "ingest", "source", "input"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Fetches a file via HTTP")
 @WritesAttributes({
     @WritesAttribute(attribute = "filename", description = "The filename is set to the name of the file on the remote server"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
index 6be505a..0ba7f98 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSQueue.java
@@ -21,6 +21,8 @@ import java.util.concurrent.LinkedBlockingQueue;
 
 import javax.jms.JMSException;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -34,6 +36,7 @@ import org.apache.nifi.processors.standard.util.JmsFactory;
 import org.apache.nifi.processors.standard.util.WrappedMessageConsumer;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"jms", "queue", "listen", "get", "pull", "source", "consume", "consumer"})
 @CapabilityDescription("Pulls messages from a JMS Queue, creating a FlowFile for each JMS Message or bundle of messages, as configured")
 @SeeAlso(PutJMS.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
index e7209cc..272c7ab 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetJMSTopic.java
@@ -41,6 +41,8 @@ import javax.jms.InvalidDestinationException;
 import javax.jms.JMSException;
 import javax.jms.Session;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -60,6 +62,7 @@ import org.apache.nifi.processors.standard.util.WrappedMessageConsumer;
 
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"jms", "topic", "subscription", "durable", "non-durable", "listen", "get", "pull", "source", "consume", "consumer"})
 @CapabilityDescription("Pulls messages from a JMS Topic, creating a FlowFile for each JMS Message or bundle of messages, as configured")
 @SeeAlso(PutJMS.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
index 7841bec..63256f3 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java
@@ -21,11 +21,13 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -34,6 +36,7 @@ import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.FileTransfer;
 import org.apache.nifi.processors.standard.util.SFTPTransfer;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"sftp", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
 @CapabilityDescription("Fetches files from an SFTP Server and creates FlowFiles from them")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
index 2583e88..49bad40 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
@@ -44,11 +44,13 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.AllowableValue;
@@ -75,6 +77,7 @@ import org.eclipse.jetty.util.ssl.SslContextFactory;
 
 import com.sun.jersey.api.client.ClientResponse.Status;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"http", "https", "request", "listen", "ingress", "web service"})
 @CapabilityDescription("Starts an HTTP Server and listens for HTTP Requests. For each request, creates a FlowFile and transfers to 'success'. "
         + "This Processor is designed to be used in conjunction with the HandleHttpResponse Processor in order to create a Web Service")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
index 6de3fe6..a4317dc 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
@@ -27,8 +27,10 @@ import java.util.regex.Pattern;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -41,6 +43,7 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"http", "https", "response", "egress", "web service"})
 @CapabilityDescription("Sends an HTTP Response to the Requestor that generated a FlowFile. This Processor is designed to be used in conjunction with "
         + "the HandleHttpRequest in order to create a web service.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
index b3dbf83..a0c603c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashAttribute.java
@@ -33,11 +33,13 @@ import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -96,6 +98,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @SideEffectFree
 @SupportsBatching
 @Tags({"attributes", "hash"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Hashes together the key/value pairs of several FlowFile Attributes and adds the hash as a new attribute. "
         + "Optional properties are to be added such that the name of the property is the name of a FlowFile Attribute to consider "
         + "and the value of the property is a regular expression that, if matched by the attribute value, will cause that attribute "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
index 526754e..9885599 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HashContent.java
@@ -29,10 +29,12 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -50,6 +52,7 @@ import org.apache.nifi.util.ObjectHolder;
 
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hash", "content", "MD5", "SHA-1", "SHA-256"})
 @CapabilityDescription("Calculates a hash value for the Content of a FlowFile and puts that hash value on the FlowFile as an attribute whose name "
         + "is determined by the <Hash Attribute Name> property")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
index 5f16ff3..d09117d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java
@@ -24,11 +24,13 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.logging.ProcessorLog;
@@ -65,6 +67,7 @@ import org.apache.tika.mime.MimeTypeException;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"compression", "gzip", "bzip2", "zip", "MIME", "mime.type", "file", "identify"})
 @CapabilityDescription("Attempts to identify the MIME Type used for a FlowFile. If the MIME Type can be identified, "
         + "an attribute with the name 'mime.type' is added with the value being the MIME Type. If the MIME Type cannot be determined, "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
index f16eb9c..a06b3d6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java
@@ -54,6 +54,8 @@ import javax.net.ssl.SSLSession;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -77,6 +79,7 @@ import org.joda.time.format.DateTimeFormatter;
 
 @SupportsBatching
 @Tags({"http", "https", "rest", "client"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("An HTTP client processor which converts FlowFile attributes to HTTP headers, with configurable HTTP method, url, etc.")
 @WritesAttributes({
     @WritesAttribute(attribute = "invokehttp.status.code", description = "The status code that is returned"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
index c7842d9..258e122 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
@@ -31,10 +31,14 @@ import java.util.regex.Pattern;
 import javax.servlet.Servlet;
 import javax.ws.rs.Path;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
-import org.apache.nifi.stream.io.StreamThrottler;
 import org.apache.nifi.processor.AbstractSessionFactoryProcessor;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
@@ -42,15 +46,12 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.lifecycle.OnStopped;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.servlets.ContentAcknowledgmentServlet;
 import org.apache.nifi.processors.standard.servlets.ListenHTTPServlet;
 import org.apache.nifi.ssl.SSLContextService;
-
+import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
+import org.apache.nifi.stream.io.StreamThrottler;
 import org.eclipse.jetty.server.Connector;
 import org.eclipse.jetty.server.HttpConfiguration;
 import org.eclipse.jetty.server.HttpConnectionFactory;
@@ -62,6 +63,7 @@ import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.util.thread.QueuedThreadPool;
 
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"ingest", "http", "https", "rest", "listen"})
 @CapabilityDescription("Starts an HTTP Server that is used to receive FlowFiles from remote sources. The URL of the Service will be http://{hostname}:{port}/contentListener")
 public class ListenHTTP extends AbstractSessionFactoryProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
index 6a88bd4..b620dd3 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java
@@ -41,6 +41,15 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.annotation.lifecycle.OnStopped;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -58,19 +67,11 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessSessionFactory;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.lifecycle.OnScheduled;
-import org.apache.nifi.annotation.lifecycle.OnStopped;
-import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.UDPStreamConsumer;
 import org.apache.nifi.util.Tuple;
 
-import org.apache.commons.lang3.StringUtils;
-
 /**
  * <p>
  * This processor listens for Datagram Packets on a given port and concatenates the contents of those packets together generating flow files roughly as often as the internal buffer fills up or until
@@ -113,6 +114,7 @@ import org.apache.commons.lang3.StringUtils;
  */
 @TriggerWhenEmpty
 @Tags({"ingest", "udp", "listen", "source"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Listens for Datagram Packets on a given port and concatenates the contents of those packets "
         + "together generating flow files")
 public class ListenUDP extends AbstractSessionFactoryProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
index 6d0b643..5cd5b14 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java
@@ -27,6 +27,14 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
@@ -35,22 +43,16 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
 import org.eclipse.jetty.util.StringUtil;
 
 @EventDriven
 @SideEffectFree
 @SupportsBatching
 @Tags({"attributes", "logging"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 public class LogAttribute extends AbstractProcessor {
 
     public static final PropertyDescriptor LOG_LEVEL = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
index e9258df..2cad11e 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
@@ -48,15 +48,17 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
+import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
-import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
-import org.apache.nifi.annotation.behavior.WritesAttributes;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;
@@ -86,6 +88,7 @@ import org.apache.nifi.util.ObjectHolder;
 
 @SideEffectFree
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"merge", "content", "correlation", "tar", "zip", "stream", "concatenation", "archive", "flowfile-stream", "flowfile-stream-v3"})
 @CapabilityDescription("Merges a Group of FlowFiles together based on a user-defined strategy and packages them into a single FlowFile. "
         + "It is recommended that the Processor be configured with only a single incoming connection, as Group of FlowFiles will not be "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
index be21b32..e0efa3d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java
@@ -25,28 +25,32 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.StopWatch;
 
 @EventDriven
 @SideEffectFree
 @Tags({"binary", "discard", "keep"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Keep or discard bytes range from a binary file.")
 public class ModifyBytes extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
index 2900623..426b792 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java
@@ -16,6 +16,22 @@
  */
 package org.apache.nifi.processors.standard;
 
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
@@ -36,23 +52,10 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
 @SideEffectFree
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"monitor", "flow", "active", "inactive", "activity", "detection"})
 @CapabilityDescription("Monitors the flow for activity and sends out an indicator when the flow has not had any data for "
         + "some specified amount of time and again when the flow's activity is restored")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
index 51f28e0..ef84629 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PostHTTP.java
@@ -82,6 +82,8 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.protocol.HttpContext;
 import org.apache.http.protocol.HttpCoreContext;
 import org.apache.http.util.EntityUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -121,6 +123,7 @@ import org.apache.nifi.util.StopWatch;
 import com.sun.jersey.api.client.ClientResponse.Status;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"http", "https", "remote", "copy", "archive"})
 @CapabilityDescription("Performs an HTTP Post with the content of the FlowFile")
 @ReadsAttribute(attribute = "mime.type", description = "If not sending data as a FlowFile, the mime.type attribute will be used to set the HTTP Header for Content-Type")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
index 7e2dd31..5605b8d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java
@@ -47,6 +47,8 @@ import javax.mail.internet.PreencodedMimeBodyPart;
 import javax.mail.util.ByteArrayDataSource;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -67,6 +69,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 
 @SupportsBatching
 @Tags({"email", "put", "notify", "smtp"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Sends an e-mail to configured recipients for each incoming FlowFile")
 public class PutEmail extends AbstractProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
index b959efa..1679982 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java
@@ -28,6 +28,8 @@ import java.util.regex.Pattern;
 
 import org.apache.nifi.annotation.behavior.DynamicProperties;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -41,6 +43,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"remote", "copy", "egress", "put", "ftp", "archive", "files"})
 @CapabilityDescription("Sends FlowFiles to an FTP Server")
 @SeeAlso(GetFTP.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
index 3bbe093..8c4b00f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java
@@ -34,6 +34,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -52,6 +54,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"put", "local", "copy", "archive", "files", "filesystem"})
 @CapabilityDescription("Writes the contents of a FlowFile to the local file system")
 @SeeAlso(GetFile.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
index 034a3fc..dff5a6b 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutJMS.java
@@ -39,9 +39,9 @@ import static org.apache.nifi.processors.standard.util.JmsProperties.MESSAGE_TTL
 import static org.apache.nifi.processors.standard.util.JmsProperties.MESSAGE_TYPE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_BYTE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_EMPTY;
+import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_MAP;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_STREAM;
 import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_TEXT;
-import static org.apache.nifi.processors.standard.util.JmsProperties.MSG_TYPE_MAP;
 import static org.apache.nifi.processors.standard.util.JmsProperties.PASSWORD;
 import static org.apache.nifi.processors.standard.util.JmsProperties.REPLY_TO_QUEUE;
 import static org.apache.nifi.processors.standard.util.JmsProperties.TIMEOUT;
@@ -70,6 +70,8 @@ import javax.jms.MessageProducer;
 import javax.jms.Session;
 import javax.jms.StreamMessage;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -89,6 +91,7 @@ import org.apache.nifi.processors.standard.util.WrappedMessageProducer;
 import org.apache.nifi.stream.io.StreamUtils;
 
 @Tags({"jms", "send", "put"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Creates a JMS Message from the contents of a FlowFile and sends the message to a JMS Server")
 @SeeAlso({GetJMSQueue.class, GetJMSTopic.class})
 public class PutJMS extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
index 97fe7e5..48cfc26 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java
@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -31,6 +33,7 @@ import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.standard.util.SFTPTransfer;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"remote", "copy", "egress", "put", "sftp", "archive", "files"})
 @CapabilityDescription("Sends FlowFiles to an SFTP Server")
 @SeeAlso(GetSFTP.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
index b087737..0913f86 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java
@@ -45,6 +45,8 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.ReadsAttributes;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
@@ -68,6 +70,7 @@ import org.apache.nifi.stream.io.StreamUtils;
 
 @SupportsBatching
 @SeeAlso(ConvertJSONToSQL.class)
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"sql", "put", "rdbms", "database", "update", "insert", "relational"})
 @CapabilityDescription("Executes a SQL UPDATE or INSERT command. The content of an incoming FlowFile is expected to be the SQL command "
         + "to execute. The SQL command may use the ? to escape parameters. In this case, the parameters to use must exist as FlowFile attributes "


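The three hunks above all follow the same NIFI-810 pattern: a class-level @InputRequirement annotation declaring whether a processor needs incoming connections. For reference, a minimal sketch of a complete processor carrying the annotation; ExamplePutProcessor and its single relationship are hypothetical and exist only to illustrate the declaration, while the imports mirror the ones added in the diffs:

    import java.util.Collections;
    import java.util.Set;

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.annotation.documentation.CapabilityDescription;
    import org.apache.nifi.annotation.documentation.Tags;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;

    @Tags({"example", "put"})
    @InputRequirement(Requirement.INPUT_REQUIRED) // the framework will flag this processor as invalid without an incoming connection
    @CapabilityDescription("Hypothetical processor illustrating the INPUT_REQUIRED declaration")
    public class ExamplePutProcessor extends AbstractProcessor {

        static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .build();

        @Override
        public Set<Relationship> getRelationships() {
            return Collections.singleton(REL_SUCCESS);
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) {
            final FlowFile flowFile = session.get();
            if (flowFile == null) {
                return; // an incoming connection is guaranteed, a queued FlowFile is not
            }
            session.transfer(flowFile, REL_SUCCESS);
        }
    }
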
[05/17] nifi git commit: NIFI-992 Adding NOTICE to nifi-couchbase-nar

Posted by ma...@apache.org.
NIFI-992 Adding NOTICE to nifi-couchbase-nar


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/6b2f5ad1
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/6b2f5ad1
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/6b2f5ad1

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 6b2f5ad1f59c6cd2ced5e7225454260cefbbbcce
Parents: 883333c
Author: Bryan Bende <bb...@apache.org>
Authored: Thu Oct 1 16:56:49 2015 -0400
Committer: Bryan Bende <bb...@apache.org>
Committed: Thu Oct 1 16:56:49 2015 -0400

----------------------------------------------------------------------
 .../src/main/resources/META-INF/NOTICE          | 21 ++++++++++++++++++++
 1 file changed, 21 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/6b2f5ad1/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-nar/src/main/resources/META-INF/NOTICE
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-nar/src/main/resources/META-INF/NOTICE b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-nar/src/main/resources/META-INF/NOTICE
new file mode 100644
index 0000000..ed69bda
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-nar/src/main/resources/META-INF/NOTICE
@@ -0,0 +1,21 @@
+nifi-couchbase-nar
+Copyright 2014-2015 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+******************
+Apache Software License v2
+******************
+
+The following binary components are provided under the Apache Software License v2
+
+    (ASLv2) Couchbase Java SDK
+      The following NOTICE information applies:
+        Couchbase Java SDK
+        Copyright 2014 Couchbase, Inc.
+
+    (ASLv2) RxJava
+      The following NOTICE information applies:
+        RxJava
+        Copyright 2012 Netflix, Inc.
\ No newline at end of file


[14/17] nifi git commit: NIFI-810: rebased from master

Posted by ma...@apache.org.
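
NIFI-810 context for the large hunk below: the rebase is almost entirely a tabs-to-spaces re-indent of StandardProcessorNode, but this is also the class that consumes the new annotation. The constructor reads @InputRequirement off the processor class via reflection and falls back to INPUT_ALLOWED when the annotation is absent, so processors written before NIFI-810 keep their permissive behavior. A self-contained sketch of that lookup pattern, with every name a hypothetical stand-in rather than the real NiFi API:

    import java.lang.annotation.ElementType;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.annotation.Target;

    public class RequirementLookup {

        // Stand-ins for org.apache.nifi.annotation.behavior.InputRequirement and its Requirement enum
        enum Requirement { INPUT_REQUIRED, INPUT_ALLOWED, INPUT_FORBIDDEN }

        @Retention(RetentionPolicy.RUNTIME)
        @Target(ElementType.TYPE)
        @interface InputRequirement {
            Requirement value();
        }

        @InputRequirement(Requirement.INPUT_REQUIRED)
        static class AnnotatedProcessor { }

        static class LegacyProcessor { } // no annotation, as with pre-NIFI-810 processors

        static Requirement resolve(final Class<?> procClass) {
            if (procClass.isAnnotationPresent(InputRequirement.class)) {
                return procClass.getAnnotation(InputRequirement.class).value();
            }
            return Requirement.INPUT_ALLOWED; // default mirrors the fallback in StandardProcessorNode
        }

        public static void main(final String[] args) {
            System.out.println(resolve(AnnotatedProcessor.class)); // INPUT_REQUIRED
            System.out.println(resolve(LegacyProcessor.class));    // INPUT_ALLOWED
        }
    }
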
http://git-wip-us.apache.org/repos/asf/nifi/blob/b974445d/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --cc nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index 3c816d0,0c39eda..f69c510
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@@ -74,1241 -74,1242 +74,1241 @@@ import org.slf4j.LoggerFactory
   */
  public class StandardProcessorNode extends ProcessorNode implements Connectable {
  
 -	public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
 -
 -	public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
 -	public static final String DEFAULT_YIELD_PERIOD = "1 sec";
 -	public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
 -	private final AtomicReference<ProcessGroup> processGroup;
 -	private final Processor processor;
 -	private final AtomicReference<String> identifier;
 -	private final Map<Connection, Connectable> destinations;
 -	private final Map<Relationship, Set<Connection>> connections;
 -	private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
 -	private final AtomicReference<List<Connection>> incomingConnectionsRef;
 -	private final ReentrantReadWriteLock rwLock;
 -	private final Lock readLock;
 -	private final Lock writeLock;
 -	private final AtomicBoolean isolated;
 -	private final AtomicBoolean lossTolerant;
 -	private final AtomicReference<ScheduledState> scheduledState;
 -	private final AtomicReference<String> comments;
 -	private final AtomicReference<String> name;
 -	private final AtomicReference<Position> position;
 -	private final AtomicReference<String> annotationData;
 -	private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
 -	private final AtomicReference<String> yieldPeriod;
 -	private final AtomicReference<String> penalizationPeriod;
 -	private final AtomicReference<Map<String, String>> style;
 -	private final AtomicInteger concurrentTaskCount;
 -	private final AtomicLong yieldExpiration;
 -	private final AtomicLong schedulingNanos;
 -	private final boolean triggerWhenEmpty;
 -	private final boolean sideEffectFree;
 -	private final boolean triggeredSerially;
 -	private final boolean triggerWhenAnyDestinationAvailable;
 -	private final boolean eventDrivenSupported;
 -	private final boolean batchSupported;
 -	private final Requirement inputRequirement;
 -	private final ValidationContextFactory validationContextFactory;
 -	private final ProcessScheduler processScheduler;
 -	private long runNanos = 0L;
 -
 -	private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
 -
 -	@SuppressWarnings("deprecation")
 -	public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
 -		final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
 -		super(processor, uuid, validationContextFactory, controllerServiceProvider);
 -
 -		this.processor = processor;
 -		identifier = new AtomicReference<>(uuid);
 -		destinations = new HashMap<>();
 -		connections = new HashMap<>();
 -		incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
 -		scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
 -		rwLock = new ReentrantReadWriteLock(false);
 -		readLock = rwLock.readLock();
 -		writeLock = rwLock.writeLock();
 -		lossTolerant = new AtomicBoolean(false);
 -		final Set<Relationship> emptySetOfRelationships = new HashSet<>();
 -		undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
 -		comments = new AtomicReference<>("");
 -		name = new AtomicReference<>(processor.getClass().getSimpleName());
 -		schedulingPeriod = new AtomicReference<>("0 sec");
 -		schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
 -		yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
 -		yieldExpiration = new AtomicLong(0L);
 -		concurrentTaskCount = new AtomicInteger(1);
 -		position = new AtomicReference<>(new Position(0D, 0D));
 -		style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
 -		this.processGroup = new AtomicReference<>();
 -		processScheduler = scheduler;
 -		annotationData = new AtomicReference<>();
 -		isolated = new AtomicBoolean(false);
 -		penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
 -
 -		final Class<?> procClass = processor.getClass();
 -		triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
 -		sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
 -		batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
 -		triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
 -		triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
 -			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
 -		this.validationContextFactory = validationContextFactory;
 -		eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
 -			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
 -
 -		final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
 -		if (inputRequirementPresent) {
 -			inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
 -		} else {
 -			inputRequirement = Requirement.INPUT_ALLOWED;
 -		}
 -
 -		schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
 -	}
 -
 -	/**
 -	 * @return comments about this specific processor instance
 -	 */
 -	@Override
 -	public String getComments() {
 -		return comments.get();
 -	}
 -
 -	/**
 -	 * Provides an opportunity to retain information about this particular processor instance
 -	 *
 -	 * @param comments new comments
 -	 */
 -	@Override
 -	public void setComments(final String comments) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			this.comments.set(comments);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public ScheduledState getScheduledState() {
 -		return scheduledState.get();
 -	}
 -
 -	@Override
 -	public Position getPosition() {
 -		return position.get();
 -	}
 -
 -	@Override
 -	public void setPosition(Position position) {
 -		this.position.set(position);
 -	}
 -
 -	@Override
 -	public Map<String, String> getStyle() {
 -		return style.get();
 -	}
 -
 -	@Override
 -	public void setStyle(final Map<String, String> style) {
 -		if (style != null) {
 -			this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
 -		}
 -	}
 -
 -	@Override
 -	public String getIdentifier() {
 -		return identifier.get();
 -	}
 -
 -	/**
 -	 * @return if true flow file content generated by this processor is considered loss tolerant
 -	 */
 -	@Override
 -	public boolean isLossTolerant() {
 -		return lossTolerant.get();
 -	}
 -
 -	@Override
 -	public boolean isIsolated() {
 -		return isolated.get();
 -	}
 -
 -	/**
 -	 * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
 -	 */
 -	@Override
 -	public boolean isTriggerWhenEmpty() {
 -		return triggerWhenEmpty;
 -	}
 -
 -	/**
 -	 * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
 -	 */
 -	@Override
 -	public boolean isSideEffectFree() {
 -		return sideEffectFree;
 -	}
 -
 -	@Override
 -	public boolean isHighThroughputSupported() {
 -		return batchSupported;
 -	}
 -
 -	/**
 -	 * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
 -	 */
 -	@Override
 -	public boolean isTriggerWhenAnyDestinationAvailable() {
 -		return triggerWhenAnyDestinationAvailable;
 -	}
 -
 -	/**
 -	 * Indicates whether flow file content made by this processor must be persisted
 -	 *
 -	 * @param lossTolerant tolerant
 -	 */
 -	@Override
 -	public void setLossTolerant(final boolean lossTolerant) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			this.lossTolerant.set(lossTolerant);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * Indicates whether the processor runs on only the primary node.
 -	 *
 -	 * @param isolated isolated
 -	 */
 -	public void setIsolated(final boolean isolated) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			this.isolated.set(isolated);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean isAutoTerminated(final Relationship relationship) {
 -		final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
 -		if (terminatable == null) {
 -			return false;
 -		}
 -		return terminatable.contains(relationship);
 -	}
 -
 -	@Override
 -	public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -
 -			for (final Relationship rel : terminate) {
 -				if (!getConnections(rel).isEmpty()) {
 -					throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
 -				}
 -			}
 -			undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
 -	 */
 -	@Override
 -	public Set<Relationship> getAutoTerminatedRelationships() {
 -		Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
 -		if (relationships == null) {
 -			relationships = new HashSet<>();
 -		}
 -		return Collections.unmodifiableSet(relationships);
 -	}
 -
 -	@Override
 -	public String getName() {
 -		return name.get();
 -	}
 -
 -	/**
 -	 * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
 -	 */
 -	@SuppressWarnings("deprecation")
 -	public String getProcessorDescription() {
 -		CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
 -		String description = null;
 -		if (capDesc != null) {
 -			description = capDesc.value();
 -		} else {
 -			final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
 -			= processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
 -			if (deprecatedCapDesc != null) {
 -				description = deprecatedCapDesc.value();
 -			}
 -		}
 -
 -		return description;
 -	}
 -
 -	@Override
 -	public void setName(final String name) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			this.name.set(name);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * @param timeUnit determines the unit of time to represent the scheduling period. If null will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
 -	 * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
 -	 */
 -	@Override
 -	public long getSchedulingPeriod(final TimeUnit timeUnit) {
 -		return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
 -	}
 -
 -	@Override
 -	public boolean isEventDrivenSupported() {
 -		readLock.lock();
 -		try {
 -			return this.eventDrivenSupported;
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * Updates the Scheduling Strategy used for this Processor
 -	 *
 -	 * @param schedulingStrategy strategy
 -	 *
 -	 * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
 -	 */
 -	@Override
 -	public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
 -		writeLock.lock();
 -		try {
 -			if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
 -				// not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
 -				// it no longer supports EventDriven mode, we don't want the app to fail to startup if it was already in Event-Driven
 -				// Mode. Instead, we will simply leave it in Timer-Driven mode
 -				return;
 -			}
 -
 -			this.schedulingStrategy = schedulingStrategy;
 -			setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * @return the currently configured scheduling strategy
 -	 */
 -	@Override
 -	public SchedulingStrategy getSchedulingStrategy() {
 -		readLock.lock();
 -		try {
 -			return this.schedulingStrategy;
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public String getSchedulingPeriod() {
 -		return schedulingPeriod.get();
 -	}
 -
 -	@Override
 -	public void setScheduldingPeriod(final String schedulingPeriod) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -
 -			switch (schedulingStrategy) {
 -				case CRON_DRIVEN: {
 -					try {
 -						new CronExpression(schedulingPeriod);
 -					} catch (final Exception e) {
 -						throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
 -					}
 -				}
 -				break;
 -				case PRIMARY_NODE_ONLY:
 -				case TIMER_DRIVEN: {
 -					final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
 -					if (schedulingNanos < 0) {
 -						throw new IllegalArgumentException("Scheduling Period must be positive");
 -					}
 -					this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
 -				}
 -				break;
 -				case EVENT_DRIVEN:
 -				default:
 -					return;
 -			}
 -
 -			this.schedulingPeriod.set(schedulingPeriod);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public long getRunDuration(final TimeUnit timeUnit) {
 -		readLock.lock();
 -		try {
 -			return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void setRunDuration(final long duration, final TimeUnit timeUnit) {
 -		writeLock.lock();
 -		try {
 -			if (duration < 0) {
 -				throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
 -			}
 -
 -			this.runNanos = timeUnit.toNanos(duration);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public long getYieldPeriod(final TimeUnit timeUnit) {
 -		return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
 -	}
 -
 -	@Override
 -	public String getYieldPeriod() {
 -		return yieldPeriod.get();
 -	}
 -
 -	@Override
 -	public void setYieldPeriod(final String yieldPeriod) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
 -			if (yieldMillis < 0) {
 -				throw new IllegalArgumentException("Yield duration must be positive");
 -			}
 -			this.yieldPeriod.set(yieldPeriod);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(long, TimeUnit)}
 -	 * methods.
 -	 */
 -	@Override
 -	public void yield() {
 -		final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
 -		yield(yieldMillis, TimeUnit.MILLISECONDS);
 -
 -		final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
 -		LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
 -	}
 -
 -	@Override
 -	public void yield(final long period, final TimeUnit timeUnit) {
 -		final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
 -		yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
 -
 -		processScheduler.yield(this);
 -	}
 -
 -	/**
 -	 * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
 -	 */
 -	@Override
 -	public long getYieldExpiration() {
 -		return yieldExpiration.get();
 -	}
 -
 -	@Override
 -	public long getPenalizationPeriod(final TimeUnit timeUnit) {
 -		return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
 -	}
 -
 -	@Override
 -	public String getPenalizationPeriod() {
 -		return penalizationPeriod.get();
 -	}
 -
 -	@Override
 -	public void setPenalizationPeriod(final String penalizationPeriod) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
 -			if (penalizationMillis < 0) {
 -				throw new IllegalArgumentException("Penalization duration must be positive");
 -			}
 -			this.penalizationPeriod.set(penalizationPeriod);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	/**
 -	 * Determines the number of concurrent tasks that may be running for this processor.
 -	 *
 -	 * @param taskCount a number of concurrent tasks this processor may have running
 -	 * @throws IllegalArgumentException if the given value is less than 1
 -	 */
 -	@Override
 -	public void setMaxConcurrentTasks(final int taskCount) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 -			}
 -			if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
 -				throw new IllegalArgumentException();
 -			}
 -			if (!triggeredSerially) {
 -				concurrentTaskCount.set(taskCount);
 -			}
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean isTriggeredSerially() {
 -		return triggeredSerially;
 -	}
 -
 -	/**
 -	 * @return the number of tasks that may execute concurrently for this processor
 -	 */
 -	@Override
 -	public int getMaxConcurrentTasks() {
 -		return concurrentTaskCount.get();
 -	}
 -
 -	@Override
 -	public LogLevel getBulletinLevel() {
 -		return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
 -	}
 -
 -	@Override
 -	public void setBulletinLevel(final LogLevel level) {
 -		LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
 -	}
 -
 -	@Override
 -	public Set<Connection> getConnections() {
 -		final Set<Connection> allConnections = new HashSet<>();
 -		readLock.lock();
 -		try {
 -			for (final Set<Connection> connectionSet : connections.values()) {
 -				allConnections.addAll(connectionSet);
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -
 -		return allConnections;
 -	}
 -
 -	@Override
 -	public List<Connection> getIncomingConnections() {
 -		return incomingConnectionsRef.get();
 -	}
 -
 -	@Override
 -	public Set<Connection> getConnections(final Relationship relationship) {
 -		final Set<Connection> applicableConnections;
 -		readLock.lock();
 -		try {
 -			applicableConnections = connections.get(relationship);
 -		} finally {
 -			readLock.unlock();
 -		}
 -		return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
 -	}
 -
 -	@Override
 -	public void addConnection(final Connection connection) {
 -		Objects.requireNonNull(connection, "connection cannot be null");
 -
 -		if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
 -			throw new IllegalStateException("Cannot add a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
 -		}
 -
 -		writeLock.lock();
 -		try {
 -			List<Connection> updatedIncoming = null;
 -			if (connection.getDestination().equals(this)) {
 -				// don't add the connection twice. This may occur if we have a self-loop because we will be told
 -				// to add the connection once because we are the source and again because we are the destination.
 -				final List<Connection> incomingConnections = incomingConnectionsRef.get();
 -				updatedIncoming = new ArrayList<>(incomingConnections);
 -				if (!updatedIncoming.contains(connection)) {
 -					updatedIncoming.add(connection);
 -				}
 -			}
 -
 -			if (connection.getSource().equals(this)) {
 -				// don't add the connection twice. This may occur if we have a self-loop because we will be told
 -				// to add the connection once because we are the source and again because we are the destination.
 -				if (!destinations.containsKey(connection)) {
 -					for (final Relationship relationship : connection.getRelationships()) {
 -						final Relationship rel = getRelationship(relationship.getName());
 -						Set<Connection> set = connections.get(rel);
 -						if (set == null) {
 -							set = new HashSet<>();
 -							connections.put(rel, set);
 -						}
 -
 -						set.add(connection);
 -
 -						destinations.put(connection, connection.getDestination());
 -					}
 -
 -					final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
 -					if (autoTerminated != null) {
 -						autoTerminated.removeAll(connection.getRelationships());
 -						this.undefinedRelationshipsToTerminate.set(autoTerminated);
 -					}
 -				}
 -			}
 -
 -			if (updatedIncoming != null) {
 -				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 -			}
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean hasIncomingConnection() {
 -		return !incomingConnectionsRef.get().isEmpty();
 -	}
 -
 -	@Override
 -	public void updateConnection(final Connection connection) throws IllegalStateException {
 -		if (requireNonNull(connection).getSource().equals(this)) {
 -			writeLock.lock();
 -			try {
 -				//
 -				// update any relationships
 -				//
 -				// first check if any relations were removed.
 -				final List<Relationship> existingRelationships = new ArrayList<>();
 -				for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
 -					if (entry.getValue().contains(connection)) {
 -						existingRelationships.add(entry.getKey());
 -					}
 -				}
 -
 -				for (final Relationship rel : connection.getRelationships()) {
 -					if (!existingRelationships.contains(rel)) {
 -						// relationship was removed. Check if this is legal.
 -						final Set<Connection> connectionsForRelationship = getConnections(rel);
 -						if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
 -							// if we are running and we do not terminate undefined relationships and this is the only
 -							// connection that defines the given relationship, and that relationship is required,
 -							// then it is not legal to remove this relationship from this connection.
 -							throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
 -								+ this + ", which is currently running");
 -						}
 -					}
 -				}
 -
 -				// remove the connection from any list that currently contains
 -				for (final Set<Connection> list : connections.values()) {
 -					list.remove(connection);
 -				}
 -
 -				// add the connection in for all relationships listed.
 -				for (final Relationship rel : connection.getRelationships()) {
 -					Set<Connection> set = connections.get(rel);
 -					if (set == null) {
 -						set = new HashSet<>();
 -						connections.put(rel, set);
 -					}
 -					set.add(connection);
 -				}
 -
 -				// update to the new destination
 -				destinations.put(connection, connection.getDestination());
 -
 -				final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
 -				if (autoTerminated != null) {
 -					autoTerminated.removeAll(connection.getRelationships());
 -					this.undefinedRelationshipsToTerminate.set(autoTerminated);
 -				}
 -			} finally {
 -				writeLock.unlock();
 -			}
 -		}
 -
 -		if (connection.getDestination().equals(this)) {
 -			writeLock.lock();
 -			try {
 -				// update our incoming connections -- we can just remove & re-add the connection to
 -				// update the list.
 -				final List<Connection> incomingConnections = incomingConnectionsRef.get();
 -				final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
 -				updatedIncoming.remove(connection);
 -				updatedIncoming.add(connection);
 -				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 -			} finally {
 -				writeLock.unlock();
 -			}
 -		}
 -	}
 -
 -	@Override
 -	public void removeConnection(final Connection connection) {
 -		boolean connectionRemoved = false;
 -
 -		if (requireNonNull(connection).getSource().equals(this)) {
 -			for (final Relationship relationship : connection.getRelationships()) {
 -				final Set<Connection> connectionsForRelationship = getConnections(relationship);
 -				if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
 -					throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
 -				}
 -			}
 -
 -			writeLock.lock();
 -			try {
 -				for (final Set<Connection> connectionList : this.connections.values()) {
 -					connectionList.remove(connection);
 -				}
 -
 -				connectionRemoved = (destinations.remove(connection) != null);
 -			} finally {
 -				writeLock.unlock();
 -			}
 -		}
 -
 -		if (connection.getDestination().equals(this)) {
 -			writeLock.lock();
 -			try {
 -				final List<Connection> incomingConnections = incomingConnectionsRef.get();
 -				if (incomingConnections.contains(connection)) {
 -					final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
 -					updatedIncoming.remove(connection);
 -					incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 -					return;
 -				}
 -			} finally {
 -				writeLock.unlock();
 -			}
 -		}
 -
 -		if (!connectionRemoved) {
 -			throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
 -		}
 -	}
 -
 -	/**
 -	 * @param relationshipName name
 -	 * @return the relationship for this nodes processor for the given name or creates a new relationship for the given name
 -	 */
 -	@Override
 -	public Relationship getRelationship(final String relationshipName) {
 -		final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
 -		Relationship returnRel = specRel;
 -
 -		final Set<Relationship> relationships;
 -		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -			relationships = processor.getRelationships();
 -		}
 -
 -		for (final Relationship rel : relationships) {
 -			if (rel.equals(specRel)) {
 -				returnRel = rel;
 -				break;
 -			}
 -		}
 -		return returnRel;
 -	}
 -
 -	@Override
 -	public Processor getProcessor() {
 -		return this.processor;
 -	}
 -
 -	/**
 -	 * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
 -	 */
 -	public Set<Connectable> getDestinations() {
 -		final Set<Connectable> nonSelfDestinations = new HashSet<>();
 -		readLock.lock();
 -		try {
 -			for (final Connectable connectable : destinations.values()) {
 -				if (connectable != this) {
 -					nonSelfDestinations.add(connectable);
 -				}
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -		return nonSelfDestinations;
 -	}
 -
 -	public Set<Connectable> getDestinations(final Relationship relationship) {
 -		readLock.lock();
 -		try {
 -			final Set<Connectable> destinationSet = new HashSet<>();
 -			final Set<Connection> relationshipConnections = connections.get(relationship);
 -			if (relationshipConnections != null) {
 -				for (final Connection connection : relationshipConnections) {
 -					destinationSet.add(destinations.get(connection));
 -				}
 -			}
 -			return destinationSet;
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	public Set<Relationship> getUndefinedRelationships() {
 -		final Set<Relationship> undefined = new HashSet<>();
 -		readLock.lock();
 -		try {
 -			final Set<Relationship> relationships;
 -			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -				relationships = processor.getRelationships();
 -			}
 -
 -			if (relationships == null) {
 -				return undefined;
 -			}
 -			for (final Relationship relation : relationships) {
 -				final Set<Connection> connectionSet = this.connections.get(relation);
 -				if (connectionSet == null || connectionSet.isEmpty()) {
 -					undefined.add(relation);
 -				}
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -		return undefined;
 -	}
 -
 -	/**
 -	 * Determines if the given node is a destination for this node
 -	 *
 -	 * @param node node
 -	 * @return true if is a direct destination node; false otherwise
 -	 */
 -	boolean isRelated(final ProcessorNode node) {
 -		readLock.lock();
 -		try {
 -			return this.destinations.containsValue(node);
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean isRunning() {
 -		readLock.lock();
 -		try {
 -			return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public int getActiveThreadCount() {
 -		readLock.lock();
 -		try {
 -			return processScheduler.getActiveThreadCount(this);
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public boolean isValid() {
 -		readLock.lock();
 -		try {
 -			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
 -
 -			final Collection<ValidationResult> validationResults;
 -			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -				validationResults = getProcessor().validate(validationContext);
 -			}
 -
 -			for (final ValidationResult result : validationResults) {
 -				if (!result.isValid()) {
 -					return false;
 -				}
 -			}
 -
 -			for (final Relationship undef : getUndefinedRelationships()) {
 -				if (!isAutoTerminated(undef)) {
 -					return false;
 -				}
 -			}
 -
 -			switch (getInputRequirement()) {
 -				case INPUT_ALLOWED:
 -					break;
 -				case INPUT_FORBIDDEN: {
 -					if (!getIncomingConnections().isEmpty()) {
 -						return false;
 -					}
 -					break;
 -				}
 -				case INPUT_REQUIRED: {
 -					if (getIncomingConnections().isEmpty()) {
 -						return false;
 -					}
 -					break;
 -				}
 -			}
 -		} catch (final Throwable t) {
 -			return false;
 -		} finally {
 -			readLock.unlock();
 -		}
 -
 -		return true;
 -	}
 -
 -	@Override
 -	public Collection<ValidationResult> getValidationErrors() {
 -		final List<ValidationResult> results = new ArrayList<>();
 -		readLock.lock();
 -		try {
 -			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
 -
 -			final Collection<ValidationResult> validationResults;
 -			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -				validationResults = getProcessor().validate(validationContext);
 -			}
 -
 -			for (final ValidationResult result : validationResults) {
 -				if (!result.isValid()) {
 -					results.add(result);
 -				}
 -			}
 -
 -			for (final Relationship relationship : getUndefinedRelationships()) {
 -				if (!isAutoTerminated(relationship)) {
 -					final ValidationResult error = new ValidationResult.Builder()
 -						.explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
 -						.subject("Relationship " + relationship.getName())
 -						.valid(false)
 -						.build();
 -					results.add(error);
 -				}
 -			}
 -
 -			switch (getInputRequirement()) {
 -				case INPUT_ALLOWED:
 -					break;
 -				case INPUT_FORBIDDEN: {
 -					final int incomingConnCount = getIncomingConnections().size();
 -					if (incomingConnCount != 0) {
 -						results.add(new ValidationResult.Builder()
 -							.explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
 -							.subject("Incoming Connections")
 -							.valid(false)
 -							.build());
 -					}
 -					break;
 -				}
 -				case INPUT_REQUIRED: {
 -					if (getIncomingConnections().isEmpty()) {
 -						results.add(new ValidationResult.Builder()
 -							.explanation("Processor requires at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
 -							.subject("Incoming Connections")
 -							.valid(false)
 -							.build());
 -					}
 -					break;
 -				}
 -			}
 -		} catch (final Throwable t) {
 -			results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
 -		} finally {
 -			readLock.unlock();
 -		}
 -		return results;
 -	}
 -
 -	@Override
 -	public Requirement getInputRequirement() {
 -		return inputRequirement;
 -	}
 -
 -	/**
 -	 * Establishes node equality (based on the processor's identifier)
 -	 *
 -	 * @param other node
 -	 * @return true if equal
 -	 */
 -	@Override
 -	public boolean equals(final Object other) {
 -		if (!(other instanceof ProcessorNode)) {
 -			return false;
 -		}
 -		final ProcessorNode on = (ProcessorNode) other;
 -		return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
 -	}
 -
 -	@Override
 -	public int hashCode() {
 -		return new HashCodeBuilder(7, 67).append(identifier).toHashCode();
 -	}
 -
 -	@Override
 -	public Collection<Relationship> getRelationships() {
 -		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -			return getProcessor().getRelationships();
 -		}
 -	}
 -
 -	@Override
 -	public String toString() {
 -		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -			return getProcessor().toString();
 -		}
 -	}
 -
 -	@Override
 -	public ProcessGroup getProcessGroup() {
 -		return processGroup.get();
 -	}
 -
 -	@Override
 -	public void setProcessGroup(final ProcessGroup group) {
 -		writeLock.lock();
 -		try {
 -			this.processGroup.set(group);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
 -		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 -			processor.onTrigger(context, sessionFactory);
 -		}
 -	}
 -
 -	@Override
 -	public ConnectableType getConnectableType() {
 -		return ConnectableType.PROCESSOR;
 -	}
 -
 -	@Override
 -	public void setScheduledState(final ScheduledState scheduledState) {
 -		this.scheduledState.set(scheduledState);
 -		if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
 -			yieldExpiration.set(0L);
 -		}
 -	}
 -
 -	@Override
 -	public void setAnnotationData(final String data) {
 -		writeLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException("Cannot set AnnotationData while processor is running");
 -			}
 -
 -			this.annotationData.set(data);
 -		} finally {
 -			writeLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public String getAnnotationData() {
 -		return annotationData.get();
 -	}
 -
 -	@Override
 -	public Collection<ValidationResult> validate(final ValidationContext validationContext) {
 -		return getValidationErrors();
 -	}
 -
 -	@Override
 -	public void verifyCanDelete() throws IllegalStateException {
 -		verifyCanDelete(false);
 -	}
 -
 -	@Override
 -	public void verifyCanDelete(final boolean ignoreConnections) {
 -		readLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException(this + " is running");
 -			}
 -
 -			if (!ignoreConnections) {
 -				for (final Set<Connection> connectionSet : connections.values()) {
 -					for (final Connection connection : connectionSet) {
 -						connection.verifyCanDelete();
 -					}
 -				}
 -
 -				for (final Connection connection : incomingConnectionsRef.get()) {
 -					if (connection.getSource().equals(this)) {
 -						connection.verifyCanDelete();
 -					} else {
 -						throw new IllegalStateException(this + " is the destination of another component");
 -					}
 -				}
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanStart() {
 -		readLock.lock();
 -		try {
 -			switch (getScheduledState()) {
 -				case DISABLED:
 -					throw new IllegalStateException(this + " cannot be started because it is disabled");
 -				case RUNNING:
 -					throw new IllegalStateException(this + " cannot be started because it is already running");
 -				case STOPPED:
 -					break;
 -			}
 -			verifyNoActiveThreads();
 -
 -			if (!isValid()) {
 -				throw new IllegalStateException(this + " is not in a valid state");
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
 -		switch (getScheduledState()) {
 -			case DISABLED:
 -				throw new IllegalStateException(this + " cannot be started because it is disabled");
 -			case RUNNING:
 -				throw new IllegalStateException(this + " cannot be started because it is already running");
 -			case STOPPED:
 -				break;
 -		}
 -		verifyNoActiveThreads();
 -
 -		final Set<String> ids = new HashSet<>();
 -		for (final ControllerServiceNode node : ignoredReferences) {
 -			ids.add(node.getIdentifier());
 -		}
 -
 -		final Collection<ValidationResult> validationResults = getValidationErrors(ids);
 -		for (final ValidationResult result : validationResults) {
 -			if (!result.isValid()) {
 -				throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
 -			}
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanStop() {
 -		if (getScheduledState() != ScheduledState.RUNNING) {
 -			throw new IllegalStateException(this + " is not scheduled to run");
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanUpdate() {
 -		readLock.lock();
 -		try {
 -			if (isRunning()) {
 -				throw new IllegalStateException(this + " is not stopped");
 -			}
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanEnable() {
 -		readLock.lock();
 -		try {
 -			if (getScheduledState() != ScheduledState.DISABLED) {
 -				throw new IllegalStateException(this + " is not disabled");
 -			}
 -
 -			verifyNoActiveThreads();
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	@Override
 -	public void verifyCanDisable() {
 -		readLock.lock();
 -		try {
 -			if (getScheduledState() != ScheduledState.STOPPED) {
 -				throw new IllegalStateException(this + " is not stopped");
 -			}
 -			verifyNoActiveThreads();
 -		} finally {
 -			readLock.unlock();
 -		}
 -	}
 -
 -	private void verifyNoActiveThreads() throws IllegalStateException {
 -		final int threadCount = processScheduler.getActiveThreadCount(this);
 -		if (threadCount > 0) {
 -			throw new IllegalStateException(this + " has " + threadCount + " threads still active");
 -		}
 -	}
 -
 -	@Override
 +    public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
 +
 +    public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
 +    public static final String DEFAULT_YIELD_PERIOD = "1 sec";
 +    public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
 +    private final AtomicReference<ProcessGroup> processGroup;
 +    private final Processor processor;
 +    private final AtomicReference<String> identifier;
 +    private final Map<Connection, Connectable> destinations;
 +    private final Map<Relationship, Set<Connection>> connections;
 +    private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
 +    private final AtomicReference<List<Connection>> incomingConnectionsRef;
 +    private final ReentrantReadWriteLock rwLock;
 +    private final Lock readLock;
 +    private final Lock writeLock;
 +    private final AtomicBoolean isolated;
 +    private final AtomicBoolean lossTolerant;
 +    private final AtomicReference<ScheduledState> scheduledState;
 +    private final AtomicReference<String> comments;
 +    private final AtomicReference<String> name;
 +    private final AtomicReference<Position> position;
 +    private final AtomicReference<String> annotationData;
 +    private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
 +    private final AtomicReference<String> yieldPeriod;
 +    private final AtomicReference<String> penalizationPeriod;
 +    private final AtomicReference<Map<String, String>> style;
 +    private final AtomicInteger concurrentTaskCount;
 +    private final AtomicLong yieldExpiration;
 +    private final AtomicLong schedulingNanos;
 +    private final boolean triggerWhenEmpty;
 +    private final boolean sideEffectFree;
 +    private final boolean triggeredSerially;
 +    private final boolean triggerWhenAnyDestinationAvailable;
 +    private final boolean eventDrivenSupported;
 +    private final boolean batchSupported;
 +    private final Requirement inputRequirement;
 +    private final ValidationContextFactory validationContextFactory;
 +    private final ProcessScheduler processScheduler;
 +    private long runNanos = 0L;
 +
 +    private SchedulingStrategy schedulingStrategy; // guarded by read/write lock
 +
 +    @SuppressWarnings("deprecation")
 +    public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
 +        final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
 +        super(processor, uuid, validationContextFactory, controllerServiceProvider);
 +
 +        this.processor = processor;
 +        identifier = new AtomicReference<>(uuid);
 +        destinations = new HashMap<>();
 +        connections = new HashMap<>();
 +        incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
 +        scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
 +        rwLock = new ReentrantReadWriteLock(false);
 +        readLock = rwLock.readLock();
 +        writeLock = rwLock.writeLock();
 +        lossTolerant = new AtomicBoolean(false);
 +        final Set<Relationship> emptySetOfRelationships = new HashSet<>();
 +        undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
 +        comments = new AtomicReference<>("");
 +        name = new AtomicReference<>(processor.getClass().getSimpleName());
 +        schedulingPeriod = new AtomicReference<>("0 sec");
 +        schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
 +        yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
 +        yieldExpiration = new AtomicLong(0L);
 +        concurrentTaskCount = new AtomicInteger(1);
 +        position = new AtomicReference<>(new Position(0D, 0D));
 +        style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
 +        this.processGroup = new AtomicReference<>();
 +        processScheduler = scheduler;
 +        annotationData = new AtomicReference<>();
 +        isolated = new AtomicBoolean(false);
 +        penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
 +
 +        final Class<?> procClass = processor.getClass();
 +        triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
 +        sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
 +        batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
 +        triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
 +        triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
 +            || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
 +        this.validationContextFactory = validationContextFactory;
 +        eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
 +            || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
 +
 +        final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
 +        if (inputRequirementPresent) {
 +            inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
 +        } else {
 +            inputRequirement = Requirement.INPUT_ALLOWED;
 +        }
 +
 +        schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
 +    }
 +
 +    /**
 +     * @return comments about this specific processor instance
 +     */
 +    @Override
 +    public String getComments() {
 +        return comments.get();
 +    }
 +
 +    /**
 +     * Provides an opportunity to retain information about this particular processor instance
 +     *
 +     * @param comments new comments
 +     */
 +    @Override
 +    public void setComments(final String comments) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            this.comments.set(comments);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public ScheduledState getScheduledState() {
 +        return scheduledState.get();
 +    }
 +
 +    @Override
 +    public Position getPosition() {
 +        return position.get();
 +    }
 +
 +    @Override
 +    public void setPosition(Position position) {
 +        this.position.set(position);
 +    }
 +
 +    @Override
 +    public Map<String, String> getStyle() {
 +        return style.get();
 +    }
 +
 +    @Override
 +    public void setStyle(final Map<String, String> style) {
 +        if (style != null) {
 +            this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
 +        }
 +    }
 +
 +    @Override
 +    public String getIdentifier() {
 +        return identifier.get();
 +    }
 +
 +    /**
 +     * @return if true flow file content generated by this processor is considered loss tolerant
 +     */
 +    @Override
 +    public boolean isLossTolerant() {
 +        return lossTolerant.get();
 +    }
 +
 +    @Override
 +    public boolean isIsolated() {
 +        return isolated.get();
 +    }
 +
 +    /**
 +     * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
 +     */
 +    @Override
 +    public boolean isTriggerWhenEmpty() {
 +        return triggerWhenEmpty;
 +    }
 +
 +    /**
 +     * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
 +     */
 +    @Override
 +    public boolean isSideEffectFree() {
 +        return sideEffectFree;
 +    }
 +
 +    @Override
 +    public boolean isHighThroughputSupported() {
 +        return batchSupported;
 +    }
 +
 +    /**
 +     * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
 +     */
 +    @Override
 +    public boolean isTriggerWhenAnyDestinationAvailable() {
 +        return triggerWhenAnyDestinationAvailable;
 +    }
 +
 +    /**
 +     * Indicates whether flow file content made by this processor must be persisted
 +     *
 +     * @param lossTolerant tolerant
 +     */
 +    @Override
 +    public void setLossTolerant(final boolean lossTolerant) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            this.lossTolerant.set(lossTolerant);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * Indicates whether the processor runs on only the primary node.
 +     *
 +     * @param isolated isolated
 +     */
 +    public void setIsolated(final boolean isolated) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            this.isolated.set(isolated);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean isAutoTerminated(final Relationship relationship) {
 +        final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
 +        if (terminatable == null) {
 +            return false;
 +        }
 +        return terminatable.contains(relationship);
 +    }
 +
 +    @Override
 +    public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +
 +            for (final Relationship rel : terminate) {
 +                if (!getConnections(rel).isEmpty()) {
 +                    throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
 +                }
 +            }
 +            undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
 +     */
 +    @Override
 +    public Set<Relationship> getAutoTerminatedRelationships() {
 +        Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
 +        if (relationships == null) {
 +            relationships = new HashSet<>();
 +        }
 +        return Collections.unmodifiableSet(relationships);
 +    }
 +
 +    @Override
 +    public String getName() {
 +        return name.get();
 +    }
 +
 +    /**
 +     * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
 +     */
 +    @SuppressWarnings("deprecation")
 +    public String getProcessorDescription() {
 +        CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
 +        String description = null;
 +        if (capDesc != null) {
 +            description = capDesc.value();
 +        } else {
 +            final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc = processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
 +            if (deprecatedCapDesc != null) {
 +                description = deprecatedCapDesc.value();
 +            }
 +        }
 +
 +        return description;
 +    }
 +
 +    @Override
 +    public void setName(final String name) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            this.name.set(name);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * @param timeUnit the unit of time in which to represent the scheduling period; if null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
 +     * @return the scheduling period that should elapse between subsequent cycles of this processor's tasks
 +     */
 +    @Override
 +    public long getSchedulingPeriod(final TimeUnit timeUnit) {
 +        return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
 +    }
 +
 +    @Override
 +    public boolean isEventDrivenSupported() {
 +        readLock.lock();
 +        try {
 +            return this.eventDrivenSupported;
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * Updates the Scheduling Strategy used for this Processor
 +     *
 +     * @param schedulingStrategy strategy
 +     *
 +     * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
 +     */
 +    @Override
 +    public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
 +        writeLock.lock();
 +        try {
 +            if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
 +                // not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
 +                // it no longer supports EventDriven mode, we don't want the app to fail to start up if it was already in Event-Driven
 +                // Mode. Instead, we will simply leave it in Timer-Driven mode
 +                return;
 +            }
 +
 +            this.schedulingStrategy = schedulingStrategy;
 +            setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * @return the currently configured scheduling strategy
 +     */
 +    @Override
 +    public SchedulingStrategy getSchedulingStrategy() {
 +        readLock.lock();
 +        try {
 +            return this.schedulingStrategy;
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public String getSchedulingPeriod() {
 +        return schedulingPeriod.get();
 +    }
 +
 +    @Override
 +    public void setScheduldingPeriod(final String schedulingPeriod) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +
 +            switch (schedulingStrategy) {
 +                case CRON_DRIVEN: {
 +                    try {
 +                        new CronExpression(schedulingPeriod);
 +                    } catch (final Exception e) {
 +                        throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
 +                    }
 +                }
 +                    break;
 +                case PRIMARY_NODE_ONLY:
 +                case TIMER_DRIVEN: {
 +                    final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
 +                    if (schedulingNanos < 0) {
 +                        throw new IllegalArgumentException("Scheduling Period must be positive");
 +                    }
 +                    this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
 +                }
 +                    break;
 +                case EVENT_DRIVEN:
 +                default:
 +                    return;
 +            }
 +
 +            this.schedulingPeriod.set(schedulingPeriod);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public long getRunDuration(final TimeUnit timeUnit) {
 +        readLock.lock();
 +        try {
 +            return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void setRunDuration(final long duration, final TimeUnit timeUnit) {
 +        writeLock.lock();
 +        try {
 +            if (duration < 0) {
 +                throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
 +            }
 +
 +            this.runNanos = timeUnit.toNanos(duration);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public long getYieldPeriod(final TimeUnit timeUnit) {
 +        return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
 +    }
 +
 +    @Override
 +    public String getYieldPeriod() {
 +        return yieldPeriod.get();
 +    }
 +
 +    @Override
 +    public void setYieldPeriod(final String yieldPeriod) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
 +            if (yieldMillis < 0) {
 +                throw new IllegalArgumentException("Yield duration must be positive");
 +            }
 +            this.yieldPeriod.set(yieldPeriod);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
 +     * methods.
 +     */
 +    @Override
 +    public void yield() {
 +        final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
 +        yield(yieldMillis, TimeUnit.MILLISECONDS);
 +
 +        final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
 +        LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
 +    }
 +
 +    @Override
 +    public void yield(final long period, final TimeUnit timeUnit) {
 +        final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
 +        yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
 +
 +        processScheduler.yield(this);
 +    }
 +
 +    /**
 +     * @return the time, in milliseconds since the Epoch, at which this processor is to once again be scheduled
 +     */
 +    @Override
 +    public long getYieldExpiration() {
 +        return yieldExpiration.get();
 +    }
 +
 +    @Override
 +    public long getPenalizationPeriod(final TimeUnit timeUnit) {
 +        return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
 +    }
 +
 +    @Override
 +    public String getPenalizationPeriod() {
 +        return penalizationPeriod.get();
 +    }
 +
 +    @Override
 +    public void setPenalizationPeriod(final String penalizationPeriod) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
 +            if (penalizationMillis < 0) {
 +                throw new IllegalArgumentException("Penalization duration must be positive");
 +            }
 +            this.penalizationPeriod.set(penalizationPeriod);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    /**
 +     * Sets the maximum number of concurrent tasks that may be running for this processor.
 +     *
 +     * @param taskCount the maximum number of concurrent tasks this processor may have running
 +     * @throws IllegalArgumentException if the given value is less than 1
 +     */
 +    @Override
 +    public void setMaxConcurrentTasks(final int taskCount) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
 +            }
 +            if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
 +                throw new IllegalArgumentException();
 +            }
 +            if (!triggeredSerially) {
 +                concurrentTaskCount.set(taskCount);
 +            }
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean isTriggeredSerially() {
 +        return triggeredSerially;
 +    }
 +
 +    /**
 +     * @return the number of tasks that may execute concurrently for this processor
 +     */
 +    @Override
 +    public int getMaxConcurrentTasks() {
 +        return concurrentTaskCount.get();
 +    }
 +
 +    @Override
 +    public LogLevel getBulletinLevel() {
 +        return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
 +    }
 +
 +    @Override
 +    public void setBulletinLevel(final LogLevel level) {
 +        LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
 +    }
 +
 +    @Override
 +    public Set<Connection> getConnections() {
 +        final Set<Connection> allConnections = new HashSet<>();
 +        readLock.lock();
 +        try {
 +            for (final Set<Connection> connectionSet : connections.values()) {
 +                allConnections.addAll(connectionSet);
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +
 +        return allConnections;
 +    }
 +
 +    @Override
 +    public List<Connection> getIncomingConnections() {
 +        return incomingConnectionsRef.get();
 +    }
 +
 +    @Override
 +    public Set<Connection> getConnections(final Relationship relationship) {
 +        final Set<Connection> applicableConnections;
 +        readLock.lock();
 +        try {
 +            applicableConnections = connections.get(relationship);
 +        } finally {
 +            readLock.unlock();
 +        }
 +        return (applicableConnections == null) ? Collections.<Connection> emptySet() : Collections.unmodifiableSet(applicableConnections);
 +    }
 +
 +    @Override
 +    public void addConnection(final Connection connection) {
 +        Objects.requireNonNull(connection, "connection cannot be null");
 +
 +        if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
 +            throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
 +        }
 +
 +        writeLock.lock();
 +        try {
 +            List<Connection> updatedIncoming = null;
 +            if (connection.getDestination().equals(this)) {
 +                // don't add the connection twice. This may occur if we have a self-loop because we will be told
 +                // to add the connection once because we are the source and again because we are the destination.
 +                final List<Connection> incomingConnections = incomingConnectionsRef.get();
 +                updatedIncoming = new ArrayList<>(incomingConnections);
 +                if (!updatedIncoming.contains(connection)) {
 +                    updatedIncoming.add(connection);
 +                }
 +            }
 +
 +            if (connection.getSource().equals(this)) {
 +                // don't add the connection twice. This may occur if we have a self-loop because we will be told
 +                // to add the connection once because we are the source and again because we are the destination.
 +                if (!destinations.containsKey(connection)) {
 +                    for (final Relationship relationship : connection.getRelationships()) {
 +                        final Relationship rel = getRelationship(relationship.getName());
 +                        Set<Connection> set = connections.get(rel);
 +                        if (set == null) {
 +                            set = new HashSet<>();
 +                            connections.put(rel, set);
 +                        }
 +
 +                        set.add(connection);
 +
 +                        destinations.put(connection, connection.getDestination());
 +                    }
 +
 +                    final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
 +                    if (autoTerminated != null) {
 +                        autoTerminated.removeAll(connection.getRelationships());
 +                        this.undefinedRelationshipsToTerminate.set(autoTerminated);
 +                    }
 +                }
 +            }
 +
 +            if (updatedIncoming != null) {
 +                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 +            }
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean hasIncomingConnection() {
 +        return !incomingConnectionsRef.get().isEmpty();
 +    }
 +
 +    @Override
 +    public void updateConnection(final Connection connection) throws IllegalStateException {
 +        if (requireNonNull(connection).getSource().equals(this)) {
 +            writeLock.lock();
 +            try {
 +                //
 +                // update any relationships
 +                //
 +                // first check if any relationships were removed.
 +                final List<Relationship> existingRelationships = new ArrayList<>();
 +                for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
 +                    if (entry.getValue().contains(connection)) {
 +                        existingRelationships.add(entry.getKey());
 +                    }
 +                }
 +
 +                for (final Relationship rel : connection.getRelationships()) {
 +                    if (!existingRelationships.contains(rel)) {
 +                        // relationship was removed. Check if this is legal.
 +                        final Set<Connection> connectionsForRelationship = getConnections(rel);
 +                        if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
 +                            // if we are running and we do not terminate undefined relationships and this is the only
 +                            // connection that defines the given relationship, and that relationship is required,
 +                            // then it is not legal to remove this relationship from this connection.
 +                            throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
 +                                + this + ", which is currently running");
 +                        }
 +                    }
 +                }
 +
 +                // remove the connection from any list that currently contains
 +                for (final Set<Connection> list : connections.values()) {
 +                    list.remove(connection);
 +                }
 +
 +                // add the connection in for all relationships listed.
 +                for (final Relationship rel : connection.getRelationships()) {
 +                    Set<Connection> set = connections.get(rel);
 +                    if (set == null) {
 +                        set = new HashSet<>();
 +                        connections.put(rel, set);
 +                    }
 +                    set.add(connection);
 +                }
 +
 +                // update to the new destination
 +                destinations.put(connection, connection.getDestination());
 +
 +                final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
 +                if (autoTerminated != null) {
 +                    autoTerminated.removeAll(connection.getRelationships());
 +                    this.undefinedRelationshipsToTerminate.set(autoTerminated);
 +                }
 +            } finally {
 +                writeLock.unlock();
 +            }
 +        }
 +
 +        if (connection.getDestination().equals(this)) {
 +            writeLock.lock();
 +            try {
 +                // update our incoming connections -- we can just remove & re-add the connection to
 +                // update the list.
 +                final List<Connection> incomingConnections = incomingConnectionsRef.get();
 +                final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
 +                updatedIncoming.remove(connection);
 +                updatedIncoming.add(connection);
 +                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 +            } finally {
 +                writeLock.unlock();
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public void removeConnection(final Connection connection) {
 +        boolean connectionRemoved = false;
 +
 +        if (requireNonNull(connection).getSource().equals(this)) {
 +            for (final Relationship relationship : connection.getRelationships()) {
 +                final Set<Connection> connectionsForRelationship = getConnections(relationship);
 +                if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
 +                    throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
 +                }
 +            }
 +
 +            writeLock.lock();
 +            try {
 +                for (final Set<Connection> connectionList : this.connections.values()) {
 +                    connectionList.remove(connection);
 +                }
 +
 +                connectionRemoved = (destinations.remove(connection) != null);
 +            } finally {
 +                writeLock.unlock();
 +            }
 +        }
 +
 +        if (connection.getDestination().equals(this)) {
 +            writeLock.lock();
 +            try {
 +                final List<Connection> incomingConnections = incomingConnectionsRef.get();
 +                if (incomingConnections.contains(connection)) {
 +                    final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
 +                    updatedIncoming.remove(connection);
 +                    incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
 +                    return;
 +                }
 +            } finally {
 +                writeLock.unlock();
 +            }
 +        }
 +
 +        if (!connectionRemoved) {
 +            throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
 +        }
 +    }
 +
 +    /**
 +     * @param relationshipName name
 +     * @return the Relationship with the given name for this node's processor, or a new Relationship with that name if the processor does not define one
 +     */
 +    @Override
 +    public Relationship getRelationship(final String relationshipName) {
 +        final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
 +        Relationship returnRel = specRel;
 +
 +        final Set<Relationship> relationships;
 +        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +            relationships = processor.getRelationships();
 +        }
 +
 +        for (final Relationship rel : relationships) {
 +            if (rel.equals(specRel)) {
 +                returnRel = rel;
 +                break;
 +            }
 +        }
 +        return returnRel;
 +    }
 +
 +    @Override
 +    public Processor getProcessor() {
 +        return this.processor;
 +    }
 +
 +    /**
 +     * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
 +     */
 +    public Set<Connectable> getDestinations() {
 +        final Set<Connectable> nonSelfDestinations = new HashSet<>();
 +        readLock.lock();
 +        try {
 +            for (final Connectable connectable : destinations.values()) {
 +                if (connectable != this) {
 +                    nonSelfDestinations.add(connectable);
 +                }
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +        return nonSelfDestinations;
 +    }
 +
 +    public Set<Connectable> getDestinations(final Relationship relationship) {
 +        readLock.lock();
 +        try {
 +            final Set<Connectable> destinationSet = new HashSet<>();
 +            final Set<Connection> relationshipConnections = connections.get(relationship);
 +            if (relationshipConnections != null) {
 +                for (final Connection connection : relationshipConnections) {
 +                    destinationSet.add(destinations.get(connection));
 +                }
 +            }
 +            return destinationSet;
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    public Set<Relationship> getUndefinedRelationships() {
 +        final Set<Relationship> undefined = new HashSet<>();
 +        readLock.lock();
 +        try {
 +            final Set<Relationship> relationships;
 +            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +                relationships = processor.getRelationships();
 +            }
 +
 +            if (relationships == null) {
 +                return undefined;
 +            }
 +            for (final Relationship relation : relationships) {
 +                final Set<Connection> connectionSet = this.connections.get(relation);
 +                if (connectionSet == null || connectionSet.isEmpty()) {
 +                    undefined.add(relation);
 +                }
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +        return undefined;
 +    }
 +
 +    /**
 +     * Determines if the given node is a destination for this node
 +     *
 +     * @param node node
 +     * @return true if is a direct destination node; false otherwise
 +     */
 +    boolean isRelated(final ProcessorNode node) {
 +        readLock.lock();
 +        try {
 +            return this.destinations.containsValue(node);
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean isRunning() {
 +        readLock.lock();
 +        try {
 +            return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public int getActiveThreadCount() {
 +        readLock.lock();
 +        try {
 +            return processScheduler.getActiveThreadCount(this);
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public boolean isValid() {
 +        readLock.lock();
 +        try {
 +            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
 +
 +            final Collection<ValidationResult> validationResults;
 +            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +                validationResults = getProcessor().validate(validationContext);
 +            }
 +
 +            for (final ValidationResult result : validationResults) {
 +                if (!result.isValid()) {
 +                    return false;
 +                }
 +            }
 +
 +            for (final Relationship undef : getUndefinedRelationships()) {
 +                if (!isAutoTerminated(undef)) {
 +                    return false;
 +                }
 +            }
 +
 +            switch (getInputRequirement()) {
 +                case INPUT_ALLOWED:
 +                    break;
 +                case INPUT_FORBIDDEN: {
 +                    if (!getIncomingConnections().isEmpty()) {
 +                        return false;
 +                    }
 +                    break;
 +                }
 +                case INPUT_REQUIRED: {
 +                    if (getIncomingConnections().isEmpty()) {
 +                        return false;
 +                    }
 +                    break;
 +                }
 +            }
 +        } catch (final Throwable t) {
 +            return false;
 +        } finally {
 +            readLock.unlock();
 +        }
 +
 +        return true;
 +    }
 +
 +    @Override
 +    public Collection<ValidationResult> getValidationErrors() {
 +        final List<ValidationResult> results = new ArrayList<>();
 +        readLock.lock();
 +        try {
 +            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
 +
 +            final Collection<ValidationResult> validationResults;
 +            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +                validationResults = getProcessor().validate(validationContext);
 +            }
 +
 +            for (final ValidationResult result : validationResults) {
 +                if (!result.isValid()) {
 +                    results.add(result);
 +                }
 +            }
 +
 +            for (final Relationship relationship : getUndefinedRelationships()) {
 +                if (!isAutoTerminated(relationship)) {
 +                    final ValidationResult error = new ValidationResult.Builder()
 +                        .explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
 +                        .subject("Relationship " + relationship.getName())
 +                        .valid(false)
 +                        .build();
 +                    results.add(error);
 +                }
 +            }
 +
 +            switch (getInputRequirement()) {
 +                case INPUT_ALLOWED:
 +                    break;
 +                case INPUT_FORBIDDEN: {
 +                    final int incomingConnCount = getIncomingConnections().size();
 +                    if (incomingConnCount != 0) {
 +                        results.add(new ValidationResult.Builder()
-                             .explanation("Processor is currently configured with " + incomingConnCount + " upstream connections but does not accept any upstream connections")
-                             .subject("Upstream Connections")
++                            .explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
++                            .subject("Incoming Connections")
 +                            .valid(false)
 +                            .build());
 +                    }
 +                    break;
 +                }
 +                case INPUT_REQUIRED: {
 +                    if (getIncomingConnections().isEmpty()) {
 +                        results.add(new ValidationResult.Builder()
-                             .explanation("Processor requires an upstream connection but currently has none")
-                             .subject("Upstream Connections")
++                            .explanation("Processor required at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
++                            .subject("Incoming Connections")
 +                            .valid(false)
 +                            .build());
 +                    }
 +                    break;
 +                }
 +            }
 +        } catch (final Throwable t) {
 +            results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
 +        } finally {
 +            readLock.unlock();
 +        }
 +        return results;
 +    }
 +
 +    @Override
 +    public Requirement getInputRequirement() {
 +        return inputRequirement;
 +    }
 +
 +    /**
 +     * Establishes node equality (based on the processor's identifier)
 +     *
 +     * @param other node
 +     * @return true if equal
 +     */
 +    @Override
 +    public boolean equals(final Object other) {
 +        if (!(other instanceof ProcessorNode)) {
 +            return false;
 +        }
 +        final ProcessorNode on = (ProcessorNode) other;
 +        return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
 +    }
 +
 +    @Override
 +    public int hashCode() {
 +        return new HashCodeBuilder(7, 67).append(identifier).toHashCode();
 +    }
 +
 +    @Override
 +    public Collection<Relationship> getRelationships() {
 +        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +            return getProcessor().getRelationships();
 +        }
 +    }
 +
 +    @Override
 +    public String toString() {
 +        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +            return getProcessor().toString();
 +        }
 +    }
 +
 +    @Override
 +    public ProcessGroup getProcessGroup() {
 +        return processGroup.get();
 +    }
 +
 +    @Override
 +    public void setProcessGroup(final ProcessGroup group) {
 +        writeLock.lock();
 +        try {
 +            this.processGroup.set(group);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
 +        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
 +            processor.onTrigger(context, sessionFactory);
 +        }
 +    }
 +
 +    @Override
 +    public ConnectableType getConnectableType() {
 +        return ConnectableType.PROCESSOR;
 +    }
 +
 +    @Override
 +    public void setScheduledState(final ScheduledState scheduledState) {
 +        this.scheduledState.set(scheduledState);
 +        if (!scheduledState.equals(ScheduledState.RUNNING)) { // if user stops processor, clear yield expiration
 +            yieldExpiration.set(0L);
 +        }
 +    }
 +
 +    @Override
 +    public void setAnnotationData(final String data) {
 +        writeLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException("Cannot set AnnotationData while processor is running");
 +            }
 +
 +            this.annotationData.set(data);
 +        } finally {
 +            writeLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public String getAnnotationData() {
 +        return annotationData.get();
 +    }
 +
 +    @Override
 +    public Collection<ValidationResult> validate(final ValidationContext validationContext) {
 +        return getValidationErrors();
 +    }
 +
 +    @Override
 +    public void verifyCanDelete() throws IllegalStateException {
 +        verifyCanDelete(false);
 +    }
 +
 +    @Override
 +    public void verifyCanDelete(final boolean ignoreConnections) {
 +        readLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException(this + " is running");
 +            }
 +
 +            if (!ignoreConnections) {
 +                for (final Set<Connection> connectionSet : connections.values()) {
 +                    for (final Connection connection : connectionSet) {
 +                        connection.verifyCanDelete();
 +                    }
 +                }
 +
 +                for (final Connection connection : incomingConnectionsRef.get()) {
 +                    if (connection.getSource().equals(this)) {
 +                        connection.verifyCanDelete();
 +                    } else {
 +                        throw new IllegalStateException(this + " is the destination of another component");
 +                    }
 +                }
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanStart() {
 +        readLock.lock();
 +        try {
 +            switch (getScheduledState()) {
 +                case DISABLED:
 +                    throw new IllegalStateException(this + " cannot be started because it is disabled");
 +                case RUNNING:
 +                    throw new IllegalStateException(this + " cannot be started because it is already running");
 +                case STOPPED:
 +                    break;
 +            }
 +            verifyNoActiveThreads();
 +
 +            if (!isValid()) {
 +                throw new IllegalStateException(this + " is not in a valid state");
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
 +        switch (getScheduledState()) {
 +            case DISABLED:
 +                throw new IllegalStateException(this + " cannot be started because it is disabled");
 +            case RUNNING:
 +                throw new IllegalStateException(this + " cannot be started because it is already running");
 +            case STOPPED:
 +                break;
 +        }
 +        verifyNoActiveThreads();
 +
 +        final Set<String> ids = new HashSet<>();
 +        for (final ControllerServiceNode node : ignoredReferences) {
 +            ids.add(node.getIdentifier());
 +        }
 +
 +        final Collection<ValidationResult> validationResults = getValidationErrors(ids);
 +        for (final ValidationResult result : validationResults) {
 +            if (!result.isValid()) {
 +                throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanStop() {
 +        if (getScheduledState() != ScheduledState.RUNNING) {
 +            throw new IllegalStateException(this + " is not scheduled to run");
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanUpdate() {
 +        readLock.lock();
 +        try {
 +            if (isRunning()) {
 +                throw new IllegalStateException(this + " is not stopped");
 +            }
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanEnable() {
 +        readLock.lock();
 +        try {
 +            if (getScheduledState() != ScheduledState.DISABLED) {
 +                throw new IllegalStateException(this + " is not disabled");
 +            }
 +
 +            verifyNoActiveThreads();
 +        } finally {
 +            readLock.unlock();
 +        }
 +    }
 +
 +    @Override
 +    public void verifyCanDisable() {
 +        readLock.lock();
 +        try {
 +            if (getScheduledState() != ScheduledState.STOPPED) {
 +               

<TRUNCATED>
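
Editorial note: the input-requirement checks that close out the truncated diff above reduce to a three-way switch over the processor's declared Requirement. Below is a minimal, self-contained sketch of that decision; the Requirement constants mirror the diff, while the enclosing class and method names are illustrative and not part of the NiFi API.

    import java.util.List;

    // Illustrative condensation of the input-requirement validation performed
    // in isValid() and getValidationErrors() above.
    class InputRequirementCheck {

        enum Requirement { INPUT_ALLOWED, INPUT_FORBIDDEN, INPUT_REQUIRED }

        static boolean satisfies(final Requirement requirement, final List<?> incomingConnections) {
            switch (requirement) {
                case INPUT_FORBIDDEN:
                    // A source-style processor must have no upstream connections.
                    return incomingConnections.isEmpty();
                case INPUT_REQUIRED:
                    // A transform- or sink-style processor needs at least one.
                    return !incomingConnections.isEmpty();
                case INPUT_ALLOWED:
                default:
                    return true;
            }
        }
    }

A processor declaring INPUT_REQUIRED with an empty incoming-connection list fails this check, which is exactly the case the reworded validation message above reports.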

[03/17] nifi git commit: nifi-992 Improvements based on code review part II.

Posted by ma...@apache.org.
nifi-992 Improvements based on code review part II.

- Penalize or Yield based on ErrorHandlingStrategy.Penalty
- Add Retry relationship to PutCouchbaseKey
- Remove unnecessary try/catch and let the framework handle it
- Change CouchbaseException relation mapping for Fatal from Failure to Retry

Signed-off-by: Bryan Bende <bb...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/033a1553
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/033a1553
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/033a1553

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 033a1553abe7c83d4d9c49b753911355a881dcc5
Parents: 72eb64e
Author: ijokarumawak <ij...@gmail.com>
Authored: Thu Oct 1 15:05:29 2015 +0900
Committer: Bryan Bende <bb...@apache.org>
Committed: Thu Oct 1 14:13:20 2015 -0400

----------------------------------------------------------------------
 .../couchbase/AbstractCouchbaseProcessor.java   | 16 ++++++++++-
 .../couchbase/ErrorHandlingStrategy.java        | 28 +++++++++++++++++---
 .../processors/couchbase/GetCouchbaseKey.java   | 28 +++++++++-----------
 .../processors/couchbase/PutCouchbaseKey.java   | 14 ++++------
 .../couchbase/TestGetCouchbaseKey.java          | 19 ++++++-------
 .../couchbase/TestPutCouchbaseKey.java          | 13 ++++++---
 6 files changed, 76 insertions(+), 42 deletions(-)
----------------------------------------------------------------------
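
Editorial sketch: this commit pairs each strategy with a Result (where the FlowFile is routed) and a Penalty (how the processor backs off). Below is a minimal, self-contained rendering of that shape; the constants and accessor names mirror the ErrorHandlingStrategy diff further down, while the class name Strategy is illustrative.

    // Minimal rendering of the result/penalty pairing: Result says where the
    // FlowFile goes, Penalty says how aggressively to back off.
    public enum Strategy {
        ConfigurationError(Result.ProcessException, Penalty.Yield),
        InvalidInput(Result.Failure, Penalty.None),
        TemporalClusterError(Result.Retry, Penalty.Yield),
        TemporalFlowFileError(Result.Retry, Penalty.Penalize),
        Fatal(Result.Retry, Penalty.Yield);

        public enum Result { ProcessException, Failure, Retry }
        public enum Penalty { Yield, Penalize, None }

        private final Result result;
        private final Penalty penalty;

        Strategy(final Result result, final Penalty penalty) {
            this.result = result;
            this.penalty = penalty;
        }

        public Result result() { return result; }
        public Penalty penalty() { return penalty; }
    }

Keeping the two axes independent is what lets the commit remap Fatal from Failure to Retry without changing its yield behavior.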


http://git-wip-us.apache.org/repos/asf/nifi/blob/033a1553/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
index 066b1ca..b879041 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
@@ -175,18 +175,32 @@ public abstract class AbstractCouchbaseProcessor extends AbstractProcessor {
 
     /**
     * Handles the thrown CouchbaseException accordingly.
+     * @param context a process context
      * @param session a process session
      * @param logger a logger
      * @param inFile an input FlowFile
      * @param e the thrown CouchbaseException
      * @param errMsg a message to be logged
      */
-    protected void handleCouchbaseException(final ProcessSession session,
+    protected void handleCouchbaseException(final ProcessContext context, final ProcessSession session,
             final ProcessorLog logger, FlowFile inFile, CouchbaseException e,
             String errMsg) {
         logger.error(errMsg, e);
         if(inFile != null){
             ErrorHandlingStrategy strategy = CouchbaseExceptionMappings.getStrategy(e);
+            switch(strategy.penalty()) {
+            case Penalize:
+                if(logger.isDebugEnabled()) logger.debug("Penalized: {}", new Object[]{inFile});
+                inFile = session.penalize(inFile);
+                break;
+            case Yield:
+                if(logger.isDebugEnabled()) logger.debug("Yielded context: {}", new Object[]{inFile});
+                context.yield();
+                break;
+            case None:
+                break;
+            }
+
             switch(strategy.result()) {
             case ProcessException:
                 throw new ProcessException(errMsg, e);

http://git-wip-us.apache.org/repos/asf/nifi/blob/033a1553/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java
index 75b8f46..bae35d5 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.processors.couchbase;
 
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Penalty.None;
 import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Penalty.Penalize;
 import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Penalty.Yield;
 import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Result.Failure;
@@ -25,11 +26,32 @@ import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Result.
 
 public enum ErrorHandlingStrategy {
 
+    /**
+     * The processor configuration has to be fixed. In order not to invoke the
+     * failing processor too frequently, the context should be yielded.
+     */
     ConfigurationError(ProcessException, Yield),
-    InvalidInput(Failure, Penalize),
+    /**
+     * The input FlowFile will be sent to the failure relationship for further
+     * processing without being penalized. The FlowFile shouldn't be sent to
+     * this processor again unless the underlying issue has been resolved.
+     */
+    InvalidInput(Failure, None),
+    /**
+     * The Couchbase cluster is in an unhealthy state. Retrying may be successful,
+     * but the processor should be yielded for a while.
+     */
     TemporalClusterError(Retry, Yield),
+    /**
+     * The FlowFile was not processed successfully due to a temporary error
+     * related to this specific FlowFile or document. Retrying may be successful,
+     * but the FlowFile should be penalized for a while.
+     */
     TemporalFlowFileError(Retry, Penalize),
-    Fatal(Failure, Yield);
+    /**
+     * The error can't be recovered from without DataFlow Manager intervention.
+     */
+    Fatal(Retry, Yield);
 
     private final Result result;
     private final Penalty penalty;
@@ -46,7 +68,7 @@ public enum ErrorHandlingStrategy {
      * Indicating yield or penalize the processing when transfer the input FlowFile.
      */
     public enum Penalty {
-        Yield, Penalize;
+        Yield, Penalize, None;
     }
 
     public Result result(){

http://git-wip-us.apache.org/repos/asf/nifi/blob/033a1553/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
index 8c15e29..4aa9677 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
@@ -89,21 +89,17 @@ public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
         FlowFile inFile = session.get();
 
         String docId = null;
-        try {
-            if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
-                docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(inFile).getValue();
-            } else {
-                final byte[] content = new byte[(int) inFile.getSize()];
-                session.read(inFile, new InputStreamCallback() {
-                    @Override
-                    public void process(final InputStream in) throws IOException {
-                        StreamUtils.fillBuffer(in, content, true);
-                    }
-                });
-                docId = new String(content, StandardCharsets.UTF_8);
-            }
-        } catch (Throwable t) {
-            throw new ProcessException("Please check 'Document Id' setting. Couldn't get document id from " + inFile);
+        if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
+            docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(inFile).getValue();
+        } else {
+            final byte[] content = new byte[(int) inFile.getSize()];
+            session.read(inFile, new InputStreamCallback() {
+                @Override
+                public void process(final InputStream in) throws IOException {
+                    StreamUtils.fillBuffer(in, content, true);
+                }
+            });
+            docId = new String(content, StandardCharsets.UTF_8);
         }
 
         if(StringUtils.isEmpty(docId)){
@@ -163,7 +159,7 @@ public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
 
         } catch (CouchbaseException e){
             String errMsg = String.format("Getting docuement %s from Couchbase Server using %s failed due to %s", docId, inFile, e);
-            handleCouchbaseException(session, logger, inFile, e, errMsg);
+            handleCouchbaseException(context, session, logger, inFile, e, errMsg);
         }
     }
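
Editorial sketch: with the try/catch removed, the document-id resolution above is a plain two-branch decision. The following condensed rendering is self-contained; the method and parameter names are illustrative, and the byte[] stands in for the buffer that session.read(...) fills via StreamUtils.fillBuffer in the real processor.

    import java.nio.charset.StandardCharsets;

    final class DocIdResolution {

        // Prefer the (Expression Language-evaluated) Document Id property;
        // otherwise fall back to the FlowFile's content, decoded as UTF-8.
        static String resolveDocId(final String evaluatedDocIdProperty, final byte[] flowFileContent) {
            if (evaluatedDocIdProperty != null && !evaluatedDocIdProperty.isEmpty()) {
                return evaluatedDocIdProperty;
            }
            return new String(flowFileContent, StandardCharsets.UTF_8);
        }

        private DocIdResolution() {
        }
    }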
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/033a1553/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
index 8f41383..2aa803c 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
@@ -99,6 +99,7 @@ public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
     @Override
     protected void addSupportedRelationships(Set<Relationship> relationships) {
         relationships.add(REL_SUCCESS);
+        relationships.add(REL_RETRY);
         relationships.add(REL_FAILURE);
     }
 
@@ -110,7 +111,6 @@ public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
             return;
         }
 
-        String docId = null;
         final byte[] content = new byte[(int) flowFile.getSize()];
         session.read(flowFile, new InputStreamCallback() {
             @Override
@@ -119,13 +119,9 @@ public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
             }
         });
 
-        try {
-            docId = String.valueOf(flowFile.getAttribute(CoreAttributes.UUID.key()));
-            if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
-                docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
-            }
-        } catch (Throwable t) {
-            throw new ProcessException("Please check 'Document Id' setting. Couldn't get document id from " + flowFile);
+        String docId = String.valueOf(flowFile.getAttribute(CoreAttributes.UUID.key()));
+        if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
+            docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
         }
 
         try {
@@ -158,7 +154,7 @@ public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
 
         } catch (CouchbaseException e) {
             String errMsg = String.format("Writing docuement %s to Couchbase Server using %s failed due to %s", docId, flowFile, e);
-            handleCouchbaseException(session, logger, flowFile, e, errMsg);
+            handleCouchbaseException(context, session, logger, flowFile, e, errMsg);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/033a1553/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
index dca2ae3..108980c 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
@@ -34,6 +34,7 @@ import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.nifi.attribute.expression.language.exception.AttributeExpressionLanguageException;
 import org.apache.nifi.couchbase.CouchbaseAttributes;
 import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
 import org.apache.nifi.processor.exception.ProcessException;
@@ -181,9 +182,9 @@ public class TestGetCouchbaseKey {
 
         try {
             testRunner.run();
-            fail("ProcessException should be throws.");
+            fail("Exception should be thrown.");
         } catch (AssertionError e){
-            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+            Assert.assertTrue(e.getCause().getClass().equals(AttributeExpressionLanguageException.class));
         }
 
         testRunner.assertTransferCount(REL_SUCCESS, 0);
@@ -208,9 +209,9 @@ public class TestGetCouchbaseKey {
         testRunner.enqueue(inFileData, properties);
         try {
             testRunner.run();
-            fail("ProcessException should be throws.");
+            fail("Exception should be thrown.");
         } catch (AssertionError e){
-            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+            Assert.assertTrue(e.getCause().getClass().equals(AttributeExpressionLanguageException.class));
         }
 
         testRunner.assertTransferCount(REL_SUCCESS, 0);
@@ -286,7 +287,7 @@ public class TestGetCouchbaseKey {
         testRunner.enqueue(inFileData);
         try {
             testRunner.run();
-            fail("ProcessException should be throws.");
+            fail("ProcessException should be thrown.");
         } catch (AssertionError e){
             Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
         }
@@ -313,7 +314,7 @@ public class TestGetCouchbaseKey {
         testRunner.enqueue(inFileData);
         try {
             testRunner.run();
-            fail("ProcessException should be throws.");
+            fail("ProcessException should be thrown.");
         } catch (AssertionError e){
             Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
             Assert.assertTrue(e.getCause().getCause().getClass().equals(AuthenticationException.class));
@@ -424,9 +425,9 @@ public class TestGetCouchbaseKey {
 
         testRunner.assertTransferCount(REL_SUCCESS, 0);
         testRunner.assertTransferCount(REL_ORIGINAL, 0);
-        testRunner.assertTransferCount(REL_RETRY, 0);
-        testRunner.assertTransferCount(REL_FAILURE, 1);
-        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
+        testRunner.assertTransferCount(REL_RETRY, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
         orgFile.assertContentEquals(inputFileDataStr);
         orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
     }
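
Editorial sketch: these test diffs converge on a single idiom, in which the mock framework wraps an exception thrown during run() in an AssertionError and the test asserts on its cause. A self-contained rendering of that idiom follows; the class and method names are illustrative, and the runner is assumed to be configured with an invalid Expression Language value, as in the real tests.

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.fail;

    import org.apache.nifi.attribute.expression.language.exception.AttributeExpressionLanguageException;
    import org.apache.nifi.util.TestRunner;

    public class ExceptionCauseIdiom {

        // Runs the (pre-configured) runner and asserts that the failure's
        // root cause is the Expression Language exception, not a generic one.
        public static void assertInvalidExpressionSurfacesAsCause(final TestRunner testRunner) {
            try {
                testRunner.run();
                fail("Exception should be thrown.");
            } catch (final AssertionError e) {
                assertEquals(AttributeExpressionLanguageException.class, e.getCause().getClass());
            }
        }
    }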

http://git-wip-us.apache.org/repos/asf/nifi/blob/033a1553/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
index 0388e35..f870593 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
@@ -37,6 +37,7 @@ import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.nifi.attribute.expression.language.exception.AttributeExpressionLanguageException;
 import org.apache.nifi.couchbase.CouchbaseAttributes;
 import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -167,7 +168,10 @@ public class TestPutCouchbaseKey {
         testRunner.enqueue(inFileDataBytes, properties);
         testRunner.run();
 
-        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
+        ArgumentCaptor<RawJsonDocument> capture = ArgumentCaptor.forClass(RawJsonDocument.class);
+        verify(bucket, times(1)).upsert(capture.capture(), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
+        assertEquals(somePropertyValue, capture.getValue().id());
+        assertEquals(inFileData, capture.getValue().content());
 
         testRunner.assertTransferCount(REL_SUCCESS, 1);
         testRunner.assertTransferCount(REL_RETRY, 0);
@@ -196,9 +200,9 @@ public class TestPutCouchbaseKey {
         testRunner.enqueue(inFileDataBytes, properties);
         try {
             testRunner.run();
-            fail("ProcessException should be throws.");
+            fail("Exception should be thrown.");
         } catch (AssertionError e){
-            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+            Assert.assertTrue(e.getCause().getClass().equals(AttributeExpressionLanguageException.class));
         }
 
         testRunner.assertTransferCount(REL_SUCCESS, 0);
@@ -226,6 +230,7 @@ public class TestPutCouchbaseKey {
         ArgumentCaptor<RawJsonDocument> capture = ArgumentCaptor.forClass(RawJsonDocument.class);
         verify(bucket, times(1)).upsert(capture.capture(), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
         assertEquals(uuid, capture.getValue().id());
+        assertEquals(inFileData, capture.getValue().content());
 
         testRunner.assertTransferCount(REL_SUCCESS, 1);
         testRunner.assertTransferCount(REL_RETRY, 0);
@@ -253,7 +258,7 @@ public class TestPutCouchbaseKey {
         testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
         try {
             testRunner.run();
-            fail("ProcessException should be throws.");
+            fail("ProcessException should be thrown.");
         } catch (AssertionError e){
             Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
         }
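
Editorial sketch: replacing a bare verify(...) with an ArgumentCaptor, as the hunks above do, lets the test assert on the document that was actually upserted rather than only that upsert was called. The following generic, self-contained illustration uses a mocked List in place of the Couchbase Bucket.

    import static org.junit.Assert.assertEquals;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.times;
    import static org.mockito.Mockito.verify;

    import java.util.List;

    import org.mockito.ArgumentCaptor;

    public class CaptorIdiom {

        @SuppressWarnings("unchecked")
        public static void verifyCapturedArgument() {
            final List<String> mockList = mock(List.class);
            mockList.add("doc-content");

            // Capture the argument passed to the mock and assert on it,
            // just as the tests above capture the RawJsonDocument.
            final ArgumentCaptor<String> capture = ArgumentCaptor.forClass(String.class);
            verify(mockList, times(1)).add(capture.capture());
            assertEquals("doc-content", capture.getValue());
        }
    }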


[06/17] nifi git commit: Bumped nifi-kafka-processors Kafka version

Posted by ma...@apache.org.
Bumped nifi-kafka-processors Kafka version

This closes #99.

Signed-off-by: Aldrin Piri <al...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/31fba6b3
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/31fba6b3
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/31fba6b3

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 31fba6b3332978ca2f6a1d693f6053d719fb9daa
Parents: 6b2f5ad
Author: Randy Gelhausen <rg...@gmail.com>
Authored: Fri Oct 2 18:09:02 2015 -0400
Committer: Aldrin Piri <al...@apache.org>
Committed: Mon Oct 5 10:39:22 2015 -0400

----------------------------------------------------------------------
 nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/31fba6b3/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/pom.xml b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/pom.xml
index e4db495..ff10c09 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/pom.xml
@@ -36,8 +36,8 @@
         </dependency>
         <dependency>
             <groupId>org.apache.kafka</groupId>
-            <artifactId>kafka_2.8.2</artifactId>
-            <version>0.8.1</version>
+            <artifactId>kafka_2.9.1</artifactId>
+            <version>0.8.2.2</version>
             <exclusions>
                 <!-- Transitive dependencies excluded because they are located 
                 in a legacy Maven repository, which Maven 3 doesn't support. -->


[12/17] nifi git commit: NIFI-810: Updated the wording on validation errors due to upstream connections

Posted by ma...@apache.org.
NIFI-810: Updated the wording on validation errors due to upstream connections


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/5ecdb185
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/5ecdb185
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/5ecdb185

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 5ecdb1858e94938cd426b12bc48d3725109c6e96
Parents: 13edcfd
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Oct 7 17:26:04 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:26:14 2015 -0400

----------------------------------------------------------------------
 .../nifi/controller/StandardProcessorNode.java  | 2475 +++++++++---------
 1 file changed, 1237 insertions(+), 1238 deletions(-)
----------------------------------------------------------------------
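
The messages reworded by this commit live in getValidationErrors(), which
marks a processor invalid when its incoming connections conflict with its
declared InputRequirement. As an illustration only (not part of this commit),
a processor opts into that check with the annotation shown below; MyProcessor
is a hypothetical example, and the imports are assumed to follow the NiFi 0.x
API that this branch targets.

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;

// INPUT_REQUIRED makes the node invalid until at least one incoming
// connection exists; INPUT_FORBIDDEN is the inverse. When the annotation
// is absent, StandardProcessorNode falls back to INPUT_ALLOWED (see the
// constructor in the diff below).
@InputRequirement(Requirement.INPUT_REQUIRED)
public class MyProcessor extends AbstractProcessor {

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // Transform incoming FlowFiles here.
    }
}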


http://git-wip-us.apache.org/repos/asf/nifi/blob/5ecdb185/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index 0c39eda..3c816d0 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -74,1242 +74,1241 @@ import org.slf4j.LoggerFactory;
  */
 public class StandardProcessorNode extends ProcessorNode implements Connectable {
 
-	public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
-
-	public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
-	public static final String DEFAULT_YIELD_PERIOD = "1 sec";
-	public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
-	private final AtomicReference<ProcessGroup> processGroup;
-	private final Processor processor;
-	private final AtomicReference<String> identifier;
-	private final Map<Connection, Connectable> destinations;
-	private final Map<Relationship, Set<Connection>> connections;
-	private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
-	private final AtomicReference<List<Connection>> incomingConnectionsRef;
-	private final ReentrantReadWriteLock rwLock;
-	private final Lock readLock;
-	private final Lock writeLock;
-	private final AtomicBoolean isolated;
-	private final AtomicBoolean lossTolerant;
-	private final AtomicReference<ScheduledState> scheduledState;
-	private final AtomicReference<String> comments;
-	private final AtomicReference<String> name;
-	private final AtomicReference<Position> position;
-	private final AtomicReference<String> annotationData;
-	private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
-	private final AtomicReference<String> yieldPeriod;
-	private final AtomicReference<String> penalizationPeriod;
-	private final AtomicReference<Map<String, String>> style;
-	private final AtomicInteger concurrentTaskCount;
-	private final AtomicLong yieldExpiration;
-	private final AtomicLong schedulingNanos;
-	private final boolean triggerWhenEmpty;
-	private final boolean sideEffectFree;
-	private final boolean triggeredSerially;
-	private final boolean triggerWhenAnyDestinationAvailable;
-	private final boolean eventDrivenSupported;
-	private final boolean batchSupported;
-	private final Requirement inputRequirement;
-	private final ValidationContextFactory validationContextFactory;
-	private final ProcessScheduler processScheduler;
-	private long runNanos = 0L;
-
-	private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
-
-	@SuppressWarnings("deprecation")
-	public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
-		final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
-		super(processor, uuid, validationContextFactory, controllerServiceProvider);
-
-		this.processor = processor;
-		identifier = new AtomicReference<>(uuid);
-		destinations = new HashMap<>();
-		connections = new HashMap<>();
-		incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
-		scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
-		rwLock = new ReentrantReadWriteLock(false);
-		readLock = rwLock.readLock();
-		writeLock = rwLock.writeLock();
-		lossTolerant = new AtomicBoolean(false);
-		final Set<Relationship> emptySetOfRelationships = new HashSet<>();
-		undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
-		comments = new AtomicReference<>("");
-		name = new AtomicReference<>(processor.getClass().getSimpleName());
-		schedulingPeriod = new AtomicReference<>("0 sec");
-		schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
-		yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
-		yieldExpiration = new AtomicLong(0L);
-		concurrentTaskCount = new AtomicInteger(1);
-		position = new AtomicReference<>(new Position(0D, 0D));
-		style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
-		this.processGroup = new AtomicReference<>();
-		processScheduler = scheduler;
-		annotationData = new AtomicReference<>();
-		isolated = new AtomicBoolean(false);
-		penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
-
-		final Class<?> procClass = processor.getClass();
-		triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
-		sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
-		batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
-		triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
-		triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
-			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
-		this.validationContextFactory = validationContextFactory;
-		eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
-			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
-
-		final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
-		if (inputRequirementPresent) {
-			inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
-		} else {
-			inputRequirement = Requirement.INPUT_ALLOWED;
-		}
-
-		schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
-	}
-
-	/**
-	 * @return comments about this specific processor instance
-	 */
-	@Override
-	public String getComments() {
-		return comments.get();
-	}
-
-	/**
-	 * Provides an opportunity to retain information about this particular processor instance
-	 *
-	 * @param comments new comments
-	 */
-	@Override
-	public void setComments(final String comments) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			this.comments.set(comments);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public ScheduledState getScheduledState() {
-		return scheduledState.get();
-	}
-
-	@Override
-	public Position getPosition() {
-		return position.get();
-	}
-
-	@Override
-	public void setPosition(Position position) {
-		this.position.set(position);
-	}
-
-	@Override
-	public Map<String, String> getStyle() {
-		return style.get();
-	}
-
-	@Override
-	public void setStyle(final Map<String, String> style) {
-		if (style != null) {
-			this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
-		}
-	}
-
-	@Override
-	public String getIdentifier() {
-		return identifier.get();
-	}
-
-	/**
-	 * @return if true flow file content generated by this processor is considered loss tolerant
-	 */
-	@Override
-	public boolean isLossTolerant() {
-		return lossTolerant.get();
-	}
-
-	@Override
-	public boolean isIsolated() {
-		return isolated.get();
-	}
-
-	/**
-	 * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
-	 */
-	@Override
-	public boolean isTriggerWhenEmpty() {
-		return triggerWhenEmpty;
-	}
-
-	/**
-	 * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
-	 */
-	@Override
-	public boolean isSideEffectFree() {
-		return sideEffectFree;
-	}
-
-	@Override
-	public boolean isHighThroughputSupported() {
-		return batchSupported;
-	}
-
-	/**
-	 * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
-	 */
-	@Override
-	public boolean isTriggerWhenAnyDestinationAvailable() {
-		return triggerWhenAnyDestinationAvailable;
-	}
-
-	/**
-	 * Indicates whether flow file content made by this processor must be persisted
-	 *
-	 * @param lossTolerant tolerant
-	 */
-	@Override
-	public void setLossTolerant(final boolean lossTolerant) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			this.lossTolerant.set(lossTolerant);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * Indicates whether the processor runs on only the primary node.
-	 *
-	 * @param isolated isolated
-	 */
-	public void setIsolated(final boolean isolated) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			this.isolated.set(isolated);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean isAutoTerminated(final Relationship relationship) {
-		final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
-		if (terminatable == null) {
-			return false;
-		}
-		return terminatable.contains(relationship);
-	}
-
-	@Override
-	public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-
-			for (final Relationship rel : terminate) {
-				if (!getConnections(rel).isEmpty()) {
-					throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
-				}
-			}
-			undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
-	 */
-	@Override
-	public Set<Relationship> getAutoTerminatedRelationships() {
-		Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
-		if (relationships == null) {
-			relationships = new HashSet<>();
-		}
-		return Collections.unmodifiableSet(relationships);
-	}
-
-	@Override
-	public String getName() {
-		return name.get();
-	}
-
-	/**
-	 * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
-	 */
-	@SuppressWarnings("deprecation")
-	public String getProcessorDescription() {
-		CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
-		String description = null;
-		if (capDesc != null) {
-			description = capDesc.value();
-		} else {
-			final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
-			= processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
-			if (deprecatedCapDesc != null) {
-				description = deprecatedCapDesc.value();
-			}
-		}
-
-		return description;
-	}
-
-	@Override
-	public void setName(final String name) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			this.name.set(name);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * @param timeUnit determines the unit of time to represent the scheduling period. If null will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
-	 * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
-	 */
-	@Override
-	public long getSchedulingPeriod(final TimeUnit timeUnit) {
-		return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
-	}
-
-	@Override
-	public boolean isEventDrivenSupported() {
-		readLock.lock();
-		try {
-			return this.eventDrivenSupported;
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	/**
-	 * Updates the Scheduling Strategy used for this Processor
-	 *
-	 * @param schedulingStrategy strategy
-	 *
-	 * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
-	 */
-	@Override
-	public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
-		writeLock.lock();
-		try {
-			if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
-				// not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
-				// it no longer supports EventDriven mode, we don't want the app to fail to start up if it was already in Event-Driven
-				// Mode. Instead, we will simply leave it in Timer-Driven mode
-				return;
-			}
-
-			this.schedulingStrategy = schedulingStrategy;
-			setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * @return the currently configured scheduling strategy
-	 */
-	@Override
-	public SchedulingStrategy getSchedulingStrategy() {
-		readLock.lock();
-		try {
-			return this.schedulingStrategy;
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public String getSchedulingPeriod() {
-		return schedulingPeriod.get();
-	}
-
-	@Override
-	public void setScheduldingPeriod(final String schedulingPeriod) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-
-			switch (schedulingStrategy) {
-				case CRON_DRIVEN: {
-					try {
-						new CronExpression(schedulingPeriod);
-					} catch (final Exception e) {
-						throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
-					}
-				}
-				break;
-				case PRIMARY_NODE_ONLY:
-				case TIMER_DRIVEN: {
-					final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
-					if (schedulingNanos < 0) {
-						throw new IllegalArgumentException("Scheduling Period must be positive");
-					}
-					this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
-				}
-				break;
-				case EVENT_DRIVEN:
-				default:
-					return;
-			}
-
-			this.schedulingPeriod.set(schedulingPeriod);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public long getRunDuration(final TimeUnit timeUnit) {
-		readLock.lock();
-		try {
-			return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void setRunDuration(final long duration, final TimeUnit timeUnit) {
-		writeLock.lock();
-		try {
-			if (duration < 0) {
-				throw new IllegalArgumentException("Run Duration must be a non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
-			}
-
-			this.runNanos = timeUnit.toNanos(duration);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public long getYieldPeriod(final TimeUnit timeUnit) {
-		return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-	}
-
-	@Override
-	public String getYieldPeriod() {
-		return yieldPeriod.get();
-	}
-
-	@Override
-	public void setYieldPeriod(final String yieldPeriod) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
-			if (yieldMillis < 0) {
-				throw new IllegalArgumentException("Yield duration must be positive");
-			}
-			this.yieldPeriod.set(yieldPeriod);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(long, TimeUnit)}
-	 * methods.
-	 */
-	@Override
-	public void yield() {
-		final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
-		yield(yieldMillis, TimeUnit.MILLISECONDS);
-
-		final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
-		LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
-	}
-
-	@Override
-	public void yield(final long period, final TimeUnit timeUnit) {
-		final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
-		yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
-
-		processScheduler.yield(this);
-	}
-
-	/**
-	 * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
-	 */
-	@Override
-	public long getYieldExpiration() {
-		return yieldExpiration.get();
-	}
-
-	@Override
-	public long getPenalizationPeriod(final TimeUnit timeUnit) {
-		return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-	}
-
-	@Override
-	public String getPenalizationPeriod() {
-		return penalizationPeriod.get();
-	}
-
-	@Override
-	public void setPenalizationPeriod(final String penalizationPeriod) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
-			if (penalizationMillis < 0) {
-				throw new IllegalArgumentException("Penalization duration must be positive");
-			}
-			this.penalizationPeriod.set(penalizationPeriod);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	/**
-	 * Determines the number of concurrent tasks that may be running for this processor.
-	 *
-	 * @param taskCount a number of concurrent tasks this processor may have running
-	 * @throws IllegalArgumentException if the given value is less than 1
-	 */
-	@Override
-	public void setMaxConcurrentTasks(final int taskCount) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-			}
-			if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
-				throw new IllegalArgumentException();
-			}
-			if (!triggeredSerially) {
-				concurrentTaskCount.set(taskCount);
-			}
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean isTriggeredSerially() {
-		return triggeredSerially;
-	}
-
-	/**
-	 * @return the number of tasks that may execute concurrently for this processor
-	 */
-	@Override
-	public int getMaxConcurrentTasks() {
-		return concurrentTaskCount.get();
-	}
-
-	@Override
-	public LogLevel getBulletinLevel() {
-		return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
-	}
-
-	@Override
-	public void setBulletinLevel(final LogLevel level) {
-		LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
-	}
-
-	@Override
-	public Set<Connection> getConnections() {
-		final Set<Connection> allConnections = new HashSet<>();
-		readLock.lock();
-		try {
-			for (final Set<Connection> connectionSet : connections.values()) {
-				allConnections.addAll(connectionSet);
-			}
-		} finally {
-			readLock.unlock();
-		}
-
-		return allConnections;
-	}
-
-	@Override
-	public List<Connection> getIncomingConnections() {
-		return incomingConnectionsRef.get();
-	}
-
-	@Override
-	public Set<Connection> getConnections(final Relationship relationship) {
-		final Set<Connection> applicableConnections;
-		readLock.lock();
-		try {
-			applicableConnections = connections.get(relationship);
-		} finally {
-			readLock.unlock();
-		}
-		return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
-	}
-
-	@Override
-	public void addConnection(final Connection connection) {
-		Objects.requireNonNull(connection, "connection cannot be null");
-
-		if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
-			throw new IllegalStateException("Cannot add a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
-		}
-
-		writeLock.lock();
-		try {
-			List<Connection> updatedIncoming = null;
-			if (connection.getDestination().equals(this)) {
-				// don't add the connection twice. This may occur if we have a self-loop because we will be told
-				// to add the connection once because we are the source and again because we are the destination.
-				final List<Connection> incomingConnections = incomingConnectionsRef.get();
-				updatedIncoming = new ArrayList<>(incomingConnections);
-				if (!updatedIncoming.contains(connection)) {
-					updatedIncoming.add(connection);
-				}
-			}
-
-			if (connection.getSource().equals(this)) {
-				// don't add the connection twice. This may occur if we have a self-loop because we will be told
-				// to add the connection once because we are the source and again because we are the destination.
-				if (!destinations.containsKey(connection)) {
-					for (final Relationship relationship : connection.getRelationships()) {
-						final Relationship rel = getRelationship(relationship.getName());
-						Set<Connection> set = connections.get(rel);
-						if (set == null) {
-							set = new HashSet<>();
-							connections.put(rel, set);
-						}
-
-						set.add(connection);
-
-						destinations.put(connection, connection.getDestination());
-					}
-
-					final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-					if (autoTerminated != null) {
-						autoTerminated.removeAll(connection.getRelationships());
-						this.undefinedRelationshipsToTerminate.set(autoTerminated);
-					}
-				}
-			}
-
-			if (updatedIncoming != null) {
-				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-			}
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean hasIncomingConnection() {
-		return !incomingConnectionsRef.get().isEmpty();
-	}
-
-	@Override
-	public void updateConnection(final Connection connection) throws IllegalStateException {
-		if (requireNonNull(connection).getSource().equals(this)) {
-			writeLock.lock();
-			try {
-				//
-				// update any relationships
-				//
-				// first check if any relations were removed.
-				final List<Relationship> existingRelationships = new ArrayList<>();
-				for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
-					if (entry.getValue().contains(connection)) {
-						existingRelationships.add(entry.getKey());
-					}
-				}
-
-				for (final Relationship rel : connection.getRelationships()) {
-					if (!existingRelationships.contains(rel)) {
-						// relationship was removed. Check if this is legal.
-						final Set<Connection> connectionsForRelationship = getConnections(rel);
-						if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
-							// if we are running and we do not terminate undefined relationships and this is the only
-							// connection that defines the given relationship, and that relationship is required,
-							// then it is not legal to remove this relationship from this connection.
-							throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
-								+ this + ", which is currently running");
-						}
-					}
-				}
-
-				// remove the connection from any list that currently contains
-				for (final Set<Connection> list : connections.values()) {
-					list.remove(connection);
-				}
-
-				// add the connection in for all relationships listed.
-				for (final Relationship rel : connection.getRelationships()) {
-					Set<Connection> set = connections.get(rel);
-					if (set == null) {
-						set = new HashSet<>();
-						connections.put(rel, set);
-					}
-					set.add(connection);
-				}
-
-				// update to the new destination
-				destinations.put(connection, connection.getDestination());
-
-				final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-				if (autoTerminated != null) {
-					autoTerminated.removeAll(connection.getRelationships());
-					this.undefinedRelationshipsToTerminate.set(autoTerminated);
-				}
-			} finally {
-				writeLock.unlock();
-			}
-		}
-
-		if (connection.getDestination().equals(this)) {
-			writeLock.lock();
-			try {
-				// update our incoming connections -- we can just remove & re-add the connection to
-				// update the list.
-				final List<Connection> incomingConnections = incomingConnectionsRef.get();
-				final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-				updatedIncoming.remove(connection);
-				updatedIncoming.add(connection);
-				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-			} finally {
-				writeLock.unlock();
-			}
-		}
-	}
-
-	@Override
-	public void removeConnection(final Connection connection) {
-		boolean connectionRemoved = false;
-
-		if (requireNonNull(connection).getSource().equals(this)) {
-			for (final Relationship relationship : connection.getRelationships()) {
-				final Set<Connection> connectionsForRelationship = getConnections(relationship);
-				if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
-					throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
-				}
-			}
-
-			writeLock.lock();
-			try {
-				for (final Set<Connection> connectionList : this.connections.values()) {
-					connectionList.remove(connection);
-				}
-
-				connectionRemoved = (destinations.remove(connection) != null);
-			} finally {
-				writeLock.unlock();
-			}
-		}
-
-		if (connection.getDestination().equals(this)) {
-			writeLock.lock();
-			try {
-				final List<Connection> incomingConnections = incomingConnectionsRef.get();
-				if (incomingConnections.contains(connection)) {
-					final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-					updatedIncoming.remove(connection);
-					incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-					return;
-				}
-			} finally {
-				writeLock.unlock();
-			}
-		}
-
-		if (!connectionRemoved) {
-			throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
-		}
-	}
-
-	/**
-	 * @param relationshipName name
-	 * @return the relationship for this nodes processor for the given name or creates a new relationship for the given name
-	 */
-	@Override
-	public Relationship getRelationship(final String relationshipName) {
-		final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
-		Relationship returnRel = specRel;
-
-		final Set<Relationship> relationships;
-		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-			relationships = processor.getRelationships();
-		}
-
-		for (final Relationship rel : relationships) {
-			if (rel.equals(specRel)) {
-				returnRel = rel;
-				break;
-			}
-		}
-		return returnRel;
-	}
-
-	@Override
-	public Processor getProcessor() {
-		return this.processor;
-	}
-
-	/**
-	 * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
-	 */
-	public Set<Connectable> getDestinations() {
-		final Set<Connectable> nonSelfDestinations = new HashSet<>();
-		readLock.lock();
-		try {
-			for (final Connectable connectable : destinations.values()) {
-				if (connectable != this) {
-					nonSelfDestinations.add(connectable);
-				}
-			}
-		} finally {
-			readLock.unlock();
-		}
-		return nonSelfDestinations;
-	}
-
-	public Set<Connectable> getDestinations(final Relationship relationship) {
-		readLock.lock();
-		try {
-			final Set<Connectable> destinationSet = new HashSet<>();
-			final Set<Connection> relationshipConnections = connections.get(relationship);
-			if (relationshipConnections != null) {
-				for (final Connection connection : relationshipConnections) {
-					destinationSet.add(destinations.get(connection));
-				}
-			}
-			return destinationSet;
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	public Set<Relationship> getUndefinedRelationships() {
-		final Set<Relationship> undefined = new HashSet<>();
-		readLock.lock();
-		try {
-			final Set<Relationship> relationships;
-			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-				relationships = processor.getRelationships();
-			}
-
-			if (relationships == null) {
-				return undefined;
-			}
-			for (final Relationship relation : relationships) {
-				final Set<Connection> connectionSet = this.connections.get(relation);
-				if (connectionSet == null || connectionSet.isEmpty()) {
-					undefined.add(relation);
-				}
-			}
-		} finally {
-			readLock.unlock();
-		}
-		return undefined;
-	}
-
-	/**
-	 * Determines if the given node is a destination for this node
-	 *
-	 * @param node node
-	 * @return true if is a direct destination node; false otherwise
-	 */
-	boolean isRelated(final ProcessorNode node) {
-		readLock.lock();
-		try {
-			return this.destinations.containsValue(node);
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean isRunning() {
-		readLock.lock();
-		try {
-			return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public int getActiveThreadCount() {
-		readLock.lock();
-		try {
-			return processScheduler.getActiveThreadCount(this);
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public boolean isValid() {
-		readLock.lock();
-		try {
-			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-			final Collection<ValidationResult> validationResults;
-			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-				validationResults = getProcessor().validate(validationContext);
-			}
-
-			for (final ValidationResult result : validationResults) {
-				if (!result.isValid()) {
-					return false;
-				}
-			}
-
-			for (final Relationship undef : getUndefinedRelationships()) {
-				if (!isAutoTerminated(undef)) {
-					return false;
-				}
-			}
-
-			switch (getInputRequirement()) {
-				case INPUT_ALLOWED:
-					break;
-				case INPUT_FORBIDDEN: {
-					if (!getIncomingConnections().isEmpty()) {
-						return false;
-					}
-					break;
-				}
-				case INPUT_REQUIRED: {
-					if (getIncomingConnections().isEmpty()) {
-						return false;
-					}
-					break;
-				}
-			}
-		} catch (final Throwable t) {
-			return false;
-		} finally {
-			readLock.unlock();
-		}
-
-		return true;
-	}
-
-	@Override
-	public Collection<ValidationResult> getValidationErrors() {
-		final List<ValidationResult> results = new ArrayList<>();
-		readLock.lock();
-		try {
-			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-			final Collection<ValidationResult> validationResults;
-			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-				validationResults = getProcessor().validate(validationContext);
-			}
-
-			for (final ValidationResult result : validationResults) {
-				if (!result.isValid()) {
-					results.add(result);
-				}
-			}
-
-			for (final Relationship relationship : getUndefinedRelationships()) {
-				if (!isAutoTerminated(relationship)) {
-					final ValidationResult error = new ValidationResult.Builder()
-						.explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
-						.subject("Relationship " + relationship.getName())
-						.valid(false)
-						.build();
-					results.add(error);
-				}
-			}
-
-			switch (getInputRequirement()) {
-				case INPUT_ALLOWED:
-					break;
-				case INPUT_FORBIDDEN: {
-					final int incomingConnCount = getIncomingConnections().size();
-					if (incomingConnCount != 0) {
-						results.add(new ValidationResult.Builder()
-							.explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
-							.subject("Incoming Connections")
-							.valid(false)
-							.build());
-					}
-					break;
-				}
-				case INPUT_REQUIRED: {
-					if (getIncomingConnections().isEmpty()) {
-						results.add(new ValidationResult.Builder()
-							.explanation("Processor requires at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
-							.subject("Incoming Connections")
-							.valid(false)
-							.build());
-					}
-					break;
-				}
-			}
-		} catch (final Throwable t) {
-			results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
-		} finally {
-			readLock.unlock();
-		}
-		return results;
-	}
-
-	@Override
-	public Requirement getInputRequirement() {
-		return inputRequirement;
-	}
-
-	/**
-	 * Establishes node equality (based on the processor's identifier)
-	 *
-	 * @param other node
-	 * @return true if equal
-	 */
-	@Override
-	public boolean equals(final Object other) {
-		if (!(other instanceof ProcessorNode)) {
-			return false;
-		}
-		final ProcessorNode on = (ProcessorNode) other;
-		return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
-	}
-
-	@Override
-	public int hashCode() {
-		return new HashCodeBuilder(7, 67).append(identifier).toHashCode();
-	}
-
-	@Override
-	public Collection<Relationship> getRelationships() {
-		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-			return getProcessor().getRelationships();
-		}
-	}
-
-	@Override
-	public String toString() {
-		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-			return getProcessor().toString();
-		}
-	}
-
-	@Override
-	public ProcessGroup getProcessGroup() {
-		return processGroup.get();
-	}
-
-	@Override
-	public void setProcessGroup(final ProcessGroup group) {
-		writeLock.lock();
-		try {
-			this.processGroup.set(group);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
-		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-			processor.onTrigger(context, sessionFactory);
-		}
-	}
-
-	@Override
-	public ConnectableType getConnectableType() {
-		return ConnectableType.PROCESSOR;
-	}
-
-	@Override
-	public void setScheduledState(final ScheduledState scheduledState) {
-		this.scheduledState.set(scheduledState);
-		if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
-			yieldExpiration.set(0L);
-		}
-	}
-
-	@Override
-	public void setAnnotationData(final String data) {
-		writeLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException("Cannot set AnnotationData while processor is running");
-			}
-
-			this.annotationData.set(data);
-		} finally {
-			writeLock.unlock();
-		}
-	}
-
-	@Override
-	public String getAnnotationData() {
-		return annotationData.get();
-	}
-
-	@Override
-	public Collection<ValidationResult> validate(final ValidationContext validationContext) {
-		return getValidationErrors();
-	}
-
-	@Override
-	public void verifyCanDelete() throws IllegalStateException {
-		verifyCanDelete(false);
-	}
-
-	@Override
-	public void verifyCanDelete(final boolean ignoreConnections) {
-		readLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException(this + " is running");
-			}
-
-			if (!ignoreConnections) {
-				for (final Set<Connection> connectionSet : connections.values()) {
-					for (final Connection connection : connectionSet) {
-						connection.verifyCanDelete();
-					}
-				}
-
-				for (final Connection connection : incomingConnectionsRef.get()) {
-					if (connection.getSource().equals(this)) {
-						connection.verifyCanDelete();
-					} else {
-						throw new IllegalStateException(this + " is the destination of another component");
-					}
-				}
-			}
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void verifyCanStart() {
-		readLock.lock();
-		try {
-			switch (getScheduledState()) {
-				case DISABLED:
-					throw new IllegalStateException(this + " cannot be started because it is disabled");
-				case RUNNING:
-					throw new IllegalStateException(this + " cannot be started because it is already running");
-				case STOPPED:
-					break;
-			}
-			verifyNoActiveThreads();
-
-			if (!isValid()) {
-				throw new IllegalStateException(this + " is not in a valid state");
-			}
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
-		switch (getScheduledState()) {
-			case DISABLED:
-				throw new IllegalStateException(this + " cannot be started because it is disabled");
-			case RUNNING:
-				throw new IllegalStateException(this + " cannot be started because it is already running");
-			case STOPPED:
-				break;
-		}
-		verifyNoActiveThreads();
-
-		final Set<String> ids = new HashSet<>();
-		for (final ControllerServiceNode node : ignoredReferences) {
-			ids.add(node.getIdentifier());
-		}
-
-		final Collection<ValidationResult> validationResults = getValidationErrors(ids);
-		for (final ValidationResult result : validationResults) {
-			if (!result.isValid()) {
-				throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
-			}
-		}
-	}
-
-	@Override
-	public void verifyCanStop() {
-		if (getScheduledState() != ScheduledState.RUNNING) {
-			throw new IllegalStateException(this + " is not scheduled to run");
-		}
-	}
-
-	@Override
-	public void verifyCanUpdate() {
-		readLock.lock();
-		try {
-			if (isRunning()) {
-				throw new IllegalStateException(this + " is not stopped");
-			}
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void verifyCanEnable() {
-		readLock.lock();
-		try {
-			if (getScheduledState() != ScheduledState.DISABLED) {
-				throw new IllegalStateException(this + " is not disabled");
-			}
-
-			verifyNoActiveThreads();
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	@Override
-	public void verifyCanDisable() {
-		readLock.lock();
-		try {
-			if (getScheduledState() != ScheduledState.STOPPED) {
-				throw new IllegalStateException(this + " is not stopped");
-			}
-			verifyNoActiveThreads();
-		} finally {
-			readLock.unlock();
-		}
-	}
-
-	private void verifyNoActiveThreads() throws IllegalStateException {
-		final int threadCount = processScheduler.getActiveThreadCount(this);
-		if (threadCount > 0) {
-			throw new IllegalStateException(this + " has " + threadCount + " threads still active");
-		}
-	}
-
-	@Override
-	public void verifyModifiable() throws IllegalStateException {
-		if (isRunning()) {
-			throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-		}
-	}
+    public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
+
+    public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
+    public static final String DEFAULT_YIELD_PERIOD = "1 sec";
+    public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
+    private final AtomicReference<ProcessGroup> processGroup;
+    private final Processor processor;
+    private final AtomicReference<String> identifier;
+    private final Map<Connection, Connectable> destinations;
+    private final Map<Relationship, Set<Connection>> connections;
+    private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
+    private final AtomicReference<List<Connection>> incomingConnectionsRef;
+    private final ReentrantReadWriteLock rwLock;
+    private final Lock readLock;
+    private final Lock writeLock;
+    private final AtomicBoolean isolated;
+    private final AtomicBoolean lossTolerant;
+    private final AtomicReference<ScheduledState> scheduledState;
+    private final AtomicReference<String> comments;
+    private final AtomicReference<String> name;
+    private final AtomicReference<Position> position;
+    private final AtomicReference<String> annotationData;
+    private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
+    private final AtomicReference<String> yieldPeriod;
+    private final AtomicReference<String> penalizationPeriod;
+    private final AtomicReference<Map<String, String>> style;
+    private final AtomicInteger concurrentTaskCount;
+    private final AtomicLong yieldExpiration;
+    private final AtomicLong schedulingNanos;
+    private final boolean triggerWhenEmpty;
+    private final boolean sideEffectFree;
+    private final boolean triggeredSerially;
+    private final boolean triggerWhenAnyDestinationAvailable;
+    private final boolean eventDrivenSupported;
+    private final boolean batchSupported;
+    private final Requirement inputRequirement;
+    private final ValidationContextFactory validationContextFactory;
+    private final ProcessScheduler processScheduler;
+    private long runNanos = 0L;
+
+    private SchedulingStrategy schedulingStrategy; // guarded by read/write lock
+
+    @SuppressWarnings("deprecation")
+    public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
+        final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
+        super(processor, uuid, validationContextFactory, controllerServiceProvider);
+
+        this.processor = processor;
+        identifier = new AtomicReference<>(uuid);
+        destinations = new HashMap<>();
+        connections = new HashMap<>();
+        incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
+        scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
+        rwLock = new ReentrantReadWriteLock(false);
+        readLock = rwLock.readLock();
+        writeLock = rwLock.writeLock();
+        lossTolerant = new AtomicBoolean(false);
+        final Set<Relationship> emptySetOfRelationships = new HashSet<>();
+        undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
+        comments = new AtomicReference<>("");
+        name = new AtomicReference<>(processor.getClass().getSimpleName());
+        schedulingPeriod = new AtomicReference<>("0 sec");
+        schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
+        yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
+        yieldExpiration = new AtomicLong(0L);
+        concurrentTaskCount = new AtomicInteger(1);
+        position = new AtomicReference<>(new Position(0D, 0D));
+        style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
+        this.processGroup = new AtomicReference<>();
+        processScheduler = scheduler;
+        annotationData = new AtomicReference<>();
+        isolated = new AtomicBoolean(false);
+        penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
+
+        final Class<?> procClass = processor.getClass();
+        triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
+        sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
+        batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
+        triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
+        triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
+            || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
+        this.validationContextFactory = validationContextFactory;
+        eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
+            || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
+
+        final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
+        if (inputRequirementPresent) {
+            inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
+        } else {
+            inputRequirement = Requirement.INPUT_ALLOWED;
+        }
+
+        schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
+    }
+
+    /**
+     * @return comments about this specific processor instance
+     */
+    @Override
+    public String getComments() {
+        return comments.get();
+    }
+
+    /**
+     * Provides an opportunity to retain information about this particular processor instance
+     *
+     * @param comments new comments
+     */
+    @Override
+    public void setComments(final String comments) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            this.comments.set(comments);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public ScheduledState getScheduledState() {
+        return scheduledState.get();
+    }
+
+    @Override
+    public Position getPosition() {
+        return position.get();
+    }
+
+    @Override
+    public void setPosition(Position position) {
+        this.position.set(position);
+    }
+
+    @Override
+    public Map<String, String> getStyle() {
+        return style.get();
+    }
+
+    @Override
+    public void setStyle(final Map<String, String> style) {
+        if (style != null) {
+            this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
+        }
+    }
+
+    @Override
+    public String getIdentifier() {
+        return identifier.get();
+    }
+
+    /**
+     * @return if true flow file content generated by this processor is considered loss tolerant
+     */
+    @Override
+    public boolean isLossTolerant() {
+        return lossTolerant.get();
+    }
+
+    @Override
+    public boolean isIsolated() {
+        return isolated.get();
+    }
+
+    /**
+     * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
+     */
+    @Override
+    public boolean isTriggerWhenEmpty() {
+        return triggerWhenEmpty;
+    }
+
+    /**
+     * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
+     */
+    @Override
+    public boolean isSideEffectFree() {
+        return sideEffectFree;
+    }
+
+    @Override
+    public boolean isHighThroughputSupported() {
+        return batchSupported;
+    }
+
+    /**
+     * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
+     */
+    @Override
+    public boolean isTriggerWhenAnyDestinationAvailable() {
+        return triggerWhenAnyDestinationAvailable;
+    }
+
+    /**
+     * Indicates whether flow file content made by this processor must be persisted
+     *
+     * @param lossTolerant tolerant
+     */
+    @Override
+    public void setLossTolerant(final boolean lossTolerant) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            this.lossTolerant.set(lossTolerant);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * Indicates whether the processor runs on only the primary node.
+     *
+     * @param isolated isolated
+     */
+    public void setIsolated(final boolean isolated) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            this.isolated.set(isolated);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isAutoTerminated(final Relationship relationship) {
+        final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
+        if (terminatable == null) {
+            return false;
+        }
+        return terminatable.contains(relationship);
+    }
+
+    @Override
+    public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+
+            for (final Relationship rel : terminate) {
+                if (!getConnections(rel).isEmpty()) {
+                    throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
+                }
+            }
+            undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
+     */
+    @Override
+    public Set<Relationship> getAutoTerminatedRelationships() {
+        Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
+        if (relationships == null) {
+            relationships = new HashSet<>();
+        }
+        return Collections.unmodifiableSet(relationships);
+    }
+
+    @Override
+    public String getName() {
+        return name.get();
+    }
+
+    /**
+     * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
+     */
+    @SuppressWarnings("deprecation")
+    public String getProcessorDescription() {
+        CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
+        String description = null;
+        if (capDesc != null) {
+            description = capDesc.value();
+        } else {
+            final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc = processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
+            if (deprecatedCapDesc != null) {
+                description = deprecatedCapDesc.value();
+            }
+        }
+
+        return description;
+    }
+
+    @Override
+    public void setName(final String name) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            this.name.set(name);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * @param timeUnit determines the unit of time to represent the scheduling period. If null will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
+     * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
+     */
+    @Override
+    public long getSchedulingPeriod(final TimeUnit timeUnit) {
+        return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
+    }
+
+    @Override
+    public boolean isEventDrivenSupported() {
+        readLock.lock();
+        try {
+            return this.eventDrivenSupported;
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    /**
+     * Updates the Scheduling Strategy used for this Processor
+     *
+     * @param schedulingStrategy strategy
+     *
+     * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
+     */
+    @Override
+    public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
+        writeLock.lock();
+        try {
+            if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
+                // not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
+                // it no longer supports EventDriven mode, we don't want the app to fail to start up if it was already in Event-Driven
+                // Mode. Instead, we will simply leave it in Timer-Driven mode
+                return;
+            }
+
+            this.schedulingStrategy = schedulingStrategy;
+            setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * @return the currently configured scheduling strategy
+     */
+    @Override
+    public SchedulingStrategy getSchedulingStrategy() {
+        readLock.lock();
+        try {
+            return this.schedulingStrategy;
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public String getSchedulingPeriod() {
+        return schedulingPeriod.get();
+    }
+
+    @Override
+    public void setScheduldingPeriod(final String schedulingPeriod) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+
+            switch (schedulingStrategy) {
+                case CRON_DRIVEN: {
+                    try {
+                        new CronExpression(schedulingPeriod);
+                    } catch (final Exception e) {
+                        throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
+                    }
+                }
+                    break;
+                case PRIMARY_NODE_ONLY:
+                case TIMER_DRIVEN: {
+                    final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
+                    if (schedulingNanos < 0) {
+                        throw new IllegalArgumentException("Scheduling Period must be positive");
+                    }
+                    this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
+                }
+                    break;
+                case EVENT_DRIVEN:
+                default:
+                    return;
+            }
+
+            this.schedulingPeriod.set(schedulingPeriod);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public long getRunDuration(final TimeUnit timeUnit) {
+        readLock.lock();
+        try {
+            return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void setRunDuration(final long duration, final TimeUnit timeUnit) {
+        writeLock.lock();
+        try {
+            if (duration < 0) {
+                throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
+            }
+
+            this.runNanos = timeUnit.toNanos(duration);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public long getYieldPeriod(final TimeUnit timeUnit) {
+        return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+    }
+
+    @Override
+    public String getYieldPeriod() {
+        return yieldPeriod.get();
+    }
+
+    @Override
+    public void setYieldPeriod(final String yieldPeriod) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
+            if (yieldMillis < 0) {
+                throw new IllegalArgumentException("Yield duration must be positive");
+            }
+            this.yieldPeriod.set(yieldPeriod);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
+     * methods.
+     */
+    @Override
+    public void yield() {
+        final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
+        yield(yieldMillis, TimeUnit.MILLISECONDS);
+
+        final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
+        LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
+    }
+
+    @Override
+    public void yield(final long period, final TimeUnit timeUnit) {
+        final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
+        yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
+
+        processScheduler.yield(this);
+    }
+
+    /**
+     * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
+     */
+    @Override
+    public long getYieldExpiration() {
+        return yieldExpiration.get();
+    }
+
+    @Override
+    public long getPenalizationPeriod(final TimeUnit timeUnit) {
+        return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+    }
+
+    @Override
+    public String getPenalizationPeriod() {
+        return penalizationPeriod.get();
+    }
+
+    @Override
+    public void setPenalizationPeriod(final String penalizationPeriod) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
+            if (penalizationMillis < 0) {
+                throw new IllegalArgumentException("Penalization duration must be positive");
+            }
+            this.penalizationPeriod.set(penalizationPeriod);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    /**
+     * Sets the number of concurrent tasks that may be running for this processor.
+     *
+     * @param taskCount a number of concurrent tasks this processor may have running
+     * @throws IllegalArgumentException if the given value is less than 1
+     */
+    @Override
+    public void setMaxConcurrentTasks(final int taskCount) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+            }
+            if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
+                throw new IllegalArgumentException("Max Concurrent Tasks must be at least 1 for this scheduling strategy");
+            }
+            if (!triggeredSerially) {
+                concurrentTaskCount.set(taskCount);
+            }
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isTriggeredSerially() {
+        return triggeredSerially;
+    }
+
+    /**
+     * @return the number of tasks that may execute concurrently for this processor
+     */
+    @Override
+    public int getMaxConcurrentTasks() {
+        return concurrentTaskCount.get();
+    }
+
+    @Override
+    public LogLevel getBulletinLevel() {
+        return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
+    }
+
+    @Override
+    public void setBulletinLevel(final LogLevel level) {
+        LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
+    }
+
+    @Override
+    public Set<Connection> getConnections() {
+        final Set<Connection> allConnections = new HashSet<>();
+        readLock.lock();
+        try {
+            for (final Set<Connection> connectionSet : connections.values()) {
+                allConnections.addAll(connectionSet);
+            }
+        } finally {
+            readLock.unlock();
+        }
+
+        return allConnections;
+    }
+
+    @Override
+    public List<Connection> getIncomingConnections() {
+        return incomingConnectionsRef.get();
+    }
+
+    @Override
+    public Set<Connection> getConnections(final Relationship relationship) {
+        final Set<Connection> applicableConnections;
+        readLock.lock();
+        try {
+            applicableConnections = connections.get(relationship);
+        } finally {
+            readLock.unlock();
+        }
+        return (applicableConnections == null) ? Collections.<Connection> emptySet() : Collections.unmodifiableSet(applicableConnections);
+    }
+
+    @Override
+    public void addConnection(final Connection connection) {
+        Objects.requireNonNull(connection, "connection cannot be null");
+
+        if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
+            throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
+        }
+
+        writeLock.lock();
+        try {
+            List<Connection> updatedIncoming = null;
+            if (connection.getDestination().equals(this)) {
+                // don't add the connection twice. This may occur if we have a self-loop because we will be told
+                // to add the connection once because we are the source and again because we are the destination.
+                final List<Connection> incomingConnections = incomingConnectionsRef.get();
+                updatedIncoming = new ArrayList<>(incomingConnections);
+                if (!updatedIncoming.contains(connection)) {
+                    updatedIncoming.add(connection);
+                }
+            }
+
+            if (connection.getSource().equals(this)) {
+                // don't add the connection twice. This may occur if we have a self-loop because we will be told
+                // to add the connection once because we are the source and again because we are the destination.
+                if (!destinations.containsKey(connection)) {
+                    for (final Relationship relationship : connection.getRelationships()) {
+                        final Relationship rel = getRelationship(relationship.getName());
+                        Set<Connection> set = connections.get(rel);
+                        if (set == null) {
+                            set = new HashSet<>();
+                            connections.put(rel, set);
+                        }
+
+                        set.add(connection);
+
+                        destinations.put(connection, connection.getDestination());
+                    }
+
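+                    // relationships that this connection now serves no longer need to be auto-terminated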
+                    final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+                    if (autoTerminated != null) {
+                        autoTerminated.removeAll(connection.getRelationships());
+                        this.undefinedRelationshipsToTerminate.set(autoTerminated);
+                    }
+                }
+            }
+
+            if (updatedIncoming != null) {
+                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+            }
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean hasIncomingConnection() {
+        return !incomingConnectionsRef.get().isEmpty();
+    }
+
+    @Override
+    public void updateConnection(final Connection connection) throws IllegalStateException {
+        if (requireNonNull(connection).getSource().equals(this)) {
+            writeLock.lock();
+            try {
+                //
+                // update any relationships
+                //
+                // first check if any relationships were removed.
+                final List<Relationship> existingRelationships = new ArrayList<>();
+                for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
+                    if (entry.getValue().contains(connection)) {
+                        existingRelationships.add(entry.getKey());
+                    }
+                }
+
+                for (final Relationship rel : connection.getRelationships()) {
+                    if (!existingRelationships.contains(rel)) {
+                        // relationship was removed. Check if this is legal.
+                        final Set<Connection> connectionsForRelationship = getConnections(rel);
+                        if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
+                            // if we are running and we do not terminate undefined relationships and this is the only
+                            // connection that defines the given relationship, and that relationship is required,
+                            // then it is not legal to remove this relationship from this connection.
+                            throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
+                                + this + ", which is currently running");
+                        }
+                    }
+                }
+
+                // remove the connection from any list that currently contains
+                for (final Set<Connection> list : connections.values()) {
+                    list.remove(connection);
+                }
+
+                // add the connection in for all relationships listed.
+                for (final Relationship rel : connection.getRelationships()) {
+                    Set<Connection> set = connections.get(rel);
+                    if (set == null) {
+                        set = new HashSet<>();
+                        connections.put(rel, set);
+                    }
+                    set.add(connection);
+                }
+
+                // update to the new destination
+                destinations.put(connection, connection.getDestination());
+
+                final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+                if (autoTerminated != null) {
+                    autoTerminated.removeAll(connection.getRelationships());
+                    this.undefinedRelationshipsToTerminate.set(autoTerminated);
+                }
+            } finally {
+                writeLock.unlock();
+            }
+        }
+
+        if (connection.getDestination().equals(this)) {
+            writeLock.lock();
+            try {
+                // update our incoming connections -- we can just remove & re-add the connection to
+                // update the list.
+                final List<Connection> incomingConnections = incomingConnectionsRef.get();
+                final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+                updatedIncoming.remove(connection);
+                updatedIncoming.add(connection);
+                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+            } finally {
+                writeLock.unlock();
+            }
+        }
+    }
+
+    @Override
+    public void removeConnection(final Connection connection) {
+        boolean connectionRemoved = false;
+
+        if (requireNonNull(connection).getSource().equals(this)) {
+            for (final Relationship relationship : connection.getRelationships()) {
+                final Set<Connection> connectionsForRelationship = getConnections(relationship);
+                if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
+                    throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
+                }
+            }
+
+            writeLock.lock();
+            try {
+                for (final Set<Connection> connectionList : this.connections.values()) {
+                    connectionList.remove(connection);
+                }
+
+                connectionRemoved = (destinations.remove(connection) != null);
+            } finally {
+                writeLock.unlock();
+            }
+        }
+
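+        // if this node is the destination, prune the connection from the incoming list;
+        // the early return below bypasses the source-side check at the end of this method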
+        if (connection.getDestination().equals(this)) {
+            writeLock.lock();
+            try {
+                final List<Connection> incomingConnections = incomingConnectionsRef.get();
+                if (incomingConnections.contains(connection)) {
+                    final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+                    updatedIncoming.remove(connection);
+                    incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+                    return;
+                }
+            } finally {
+                writeLock.unlock();
+            }
+        }
+
+        if (!connectionRemoved) {
+            throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
+        }
+    }
+
+    /**
+     * @param relationshipName name
+     * @return the relationship for this node's processor with the given name, or a new Relationship built for the given name if the processor does not define one
+     */
+    @Override
+    public Relationship getRelationship(final String relationshipName) {
+        final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
+        Relationship returnRel = specRel;
+
+        final Set<Relationship> relationships;
+        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+            relationships = processor.getRelationships();
+        }
+
+        for (final Relationship rel : relationships) {
+            if (rel.equals(specRel)) {
+                returnRel = rel;
+                break;
+            }
+        }
+        return returnRel;
+    }
+
+    @Override
+    public Processor getProcessor() {
+        return this.processor;
+    }
+
+    /**
+     * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
+     */
+    public Set<Connectable> getDestinations() {
+        final Set<Connectable> nonSelfDestinations = new HashSet<>();
+        readLock.lock();
+        try {
+            for (final Connectable connectable : destinations.values()) {
+                if (connectable != this) {
+                    nonSelfDestinations.add(connectable);
+                }
+            }
+        } finally {
+            readLock.unlock();
+        }
+        return nonSelfDestinations;
+    }
+
+    public Set<Connectable> getDestinations(final Relationship relationship) {
+        readLock.lock();
+        try {
+            final Set<Connectable> destinationSet = new HashSet<>();
+            final Set<Connection> relationshipConnections = connections.get(relationship);
+            if (relationshipConnections != null) {
+                for (final Connection connection : relationshipConnections) {
+                    destinationSet.add(destinations.get(connection));
+                }
+            }
+            return destinationSet;
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    public Set<Relationship> getUndefinedRelationships() {
+        final Set<Relationship> undefined = new HashSet<>();
+        readLock.lock();
+        try {
+            final Set<Relationship> relationships;
+            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+                relationships = processor.getRelationships();
+            }
+
+            if (relationships == null) {
+                return undefined;
+            }
+            for (final Relationship relation : relationships) {
+                final Set<Connection> connectionSet = this.connections.get(relation);
+                if (connectionSet == null || connectionSet.isEmpty()) {
+                    undefined.add(relation);
+                }
+            }
+        } finally {
+            readLock.unlock();
+        }
+        return undefined;
+    }
+
+    /**
+     * Determines if the given node is a destination for this node
+     *
+     * @param node node
+     * @return true if is a direct destination node; false otherwise
+     */
+    boolean isRelated(final ProcessorNode node) {
+        readLock.lock();
+        try {
+            return this.destinations.containsValue(node);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isRunning() {
+        readLock.lock();
+        try {
+            return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public int getActiveThreadCount() {
+        readLock.lock();
+        try {
+            return processScheduler.getActiveThreadCount(this);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isValid() {
+        readLock.lock();
+        try {
+            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+            final Collection<ValidationResult> validationResults;
+            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+                validationResults = getProcessor().validate(validationContext);
+            }
+
+            for (final ValidationResult result : validationResults) {
+                if (!result.isValid()) {
+                    return false;
+                }
+            }
+
+            for (final Relationship undef : getUndefinedRelationships()) {
+                if (!isAutoTerminated(undef)) {
+                    return false;
+                }
+            }
+
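+            // NIFI-810: validity also depends on the processor's declared InputRequirement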
+            switch (getInputRequirement()) {
+                case INPUT_ALLOWED:
+                    break;
+                case INPUT_FORBIDDEN: {
+                    if (!getIncomingConnections().isEmpty()) {
+                        return false;
+                    }
+                    break;
+                }
+                case INPUT_REQUIRED: {
+                    if (getIncomingConnections().isEmpty()) {
+                        return false;
+                    }
+                    break;
+                }
+            }
+        } catch (final Throwable t) {
+            return false;
+        } finally {
+            readLock.unlock();
+        }
+
+        return true;
+    }
+
+    @Override
+    public Collection<ValidationResult> getValidationErrors() {
+        final List<ValidationResult> results = new ArrayList<>();
+        readLock.lock();
+        try {
+            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+            final Collection<ValidationResult> validationResults;
+            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+                validationResults = getProcessor().validate(validationContext);
+            }
+
+            for (final ValidationResult result : validationResults) {
+                if (!result.isValid()) {
+                    results.add(result);
+                }
+            }
+
+            for (final Relationship relationship : getUndefinedRelationships()) {
+                if (!isAutoTerminated(relationship)) {
+                    final ValidationResult error = new ValidationResult.Builder()
+                        .explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
+                        .subject("Relationship " + relationship.getName())
+                        .valid(false)
+                        .build();
+                    results.add(error);
+                }
+            }
+
+            switch (getInputRequirement()) {
+                case INPUT_ALLOWED:
+                    break;
+                case INPUT_FORBIDDEN: {
+                    final int incomingConnCount = getIncomingConnections().size();
+                    if (incomingConnCount != 0) {
+                        results.add(new ValidationResult.Builder()
+                            .explanation("Processor is currently configured with " + incomingConnCount + " upstream connections but does not accept any upstream connections")
+                            .subject("Upstream Connections")
+                            .valid(false)
+                            .build());
+                    }
+                    break;
+                }
+                case INPUT_REQUIRED: {
+                    if (getIncomingConnections().isEmpty()) {
+                        results.add(new ValidationResult.Builder()
+                            .explanation("Processor requires an upstream connection but currently has none")
+                            .subject("Upstream Connections")
+                            .valid(false)
+                            .build());
+                    }
+                    break;
+                }
+            }
+        } catch (final Throwable t) {
+            results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
+        } finally {
+            readLock.unlock();
+        }
+        return results;
+    }
+
+    @Override
+    public Requirement getInputRequirement() {
+        return inputRequirement;
+    }
+
+    /**
+     * Establishes node equality (based on the processor's identifier)
+     *
+     * @param other node
+     * @return true if equal
+     */
+    @Override
+    public boolean equals(final Object other) {
+        if (!(other instanceof ProcessorNode)) {
+            return false;
+        }
+        final ProcessorNode on = (ProcessorNode) other;
+        return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
+    }
+
+    @Override
+    public int hashCode() {
+        return new HashCodeBuilder(7, 67).append(identifier.get()).toHashCode();
+    }
+
+    @Override
+    public Collection<Relationship> getRelationships() {
+        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+            return getProcessor().getRelationships();
+        }
+    }
+
+    @Override
+    public String toString() {
+        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+            return getProcessor().toString();
+        }
+    }
+
+    @Override
+    public ProcessGroup getProcessGroup() {
+        return processGroup.get();
+    }
+
+    @Override
+    public void setProcessGroup(final ProcessGroup group) {
+        writeLock.lock();
+        try {
+            this.processGroup.set(group);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
+        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+            processor.onTrigger(context, sessionFactory);
+        }
+    }
+
+    @Override
+    public ConnectableType getConnectableType() {
+        return ConnectableType.PROCESSOR;
+    }
+
+    @Override
+    public void setScheduledState(final ScheduledState scheduledState) {
+        this.scheduledState.set(scheduledState);
+        if (!scheduledState.equals(ScheduledState.RUNNING)) { // if user stops processor, clear yield expiration
+            yieldExpiration.set(0L);
+        }
+    }
+
+    @Override
+    public void setAnnotationData(final String data) {
+        writeLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException("Cannot set AnnotationData while processor is running");
+            }
+
+            this.annotationData.set(data);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public String getAnnotationData() {
+        return annotationData.get();
+    }
+
+    @Override
+    public Collection<ValidationResult> validate(final ValidationContext validationContext) {
+        return getValidationErrors();
+    }
+
+    @Override
+    public void verifyCanDelete() throws IllegalStateException {
+        verifyCanDelete(false);
+    }
+
+    @Override
+    public void verifyCanDelete(final boolean ignoreConnections) {
+        readLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException(this + " is running");
+            }
+
+            if (!ignoreConnections) {
+                for (final Set<Connection> connectionSet : connections.values()) {
+                    for (final Connection connection : connectionSet) {
+                        connection.verifyCanDelete();
+                    }
+                }
+
+                for (final Connection connection : incomingConnectionsRef.get()) {
+                    if (connection.getSource().equals(this)) {
+                        connection.verifyCanDelete();
+                    } else {
+                        throw new IllegalStateException(this + " is the destination of another component");
+                    }
+                }
+            }
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void verifyCanStart() {
+        readLock.lock();
+        try {
+            switch (getScheduledState()) {
+                case DISABLED:
+                    throw new IllegalStateException(this + " cannot be started because it is disabled");
+                case RUNNING:
+                    throw new IllegalStateException(this + " cannot be started because it is already running");
+                case STOPPED:
+                    break;
+            }
+            verifyNoActiveThreads();
+
+            if (!isValid()) {
+                throw new IllegalStateException(this + " is not in a valid state");
+            }
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
+        switch (getScheduledState()) {
+            case DISABLED:
+                throw new IllegalStateException(this + " cannot be started because it is disabled");
+            case RUNNING:
+                throw new IllegalStateException(this + " cannot be started because it is already running");
+            case STOPPED:
+                break;
+        }
+        verifyNoActiveThreads();
+
+        final Set<String> ids = new HashSet<>();
+        for (final ControllerServiceNode node : ignoredReferences) {
+            ids.add(node.getIdentifier());
+        }
+
+        final Collection<ValidationResult> validationResults = getValidationErrors(ids);
+        for (final ValidationResult result : validationResults) {
+            if (!result.isValid()) {
+                throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
+            }
+        }
+    }
+
+    @Override
+    public void verifyCanStop() {
+        if (getScheduledState() != ScheduledState.RUNNING) {
+            throw new IllegalStateException(this + " is not scheduled to run");
+        }
+    }
+
+    @Override
+    public void verifyCanUpdate() {
+        readLock.lock();
+        try {
+            if (isRunning()) {
+                throw new IllegalStateException(this + " is not stopped");
+            }
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void verifyCanEnable() {
+        readLock.lock();
+        try {
+            if (getScheduledState() != ScheduledState.DISABLED) {
+                throw new IllegalStateException(this + " is not disabled");
+            }
+
+            verifyNoActiveThreads();
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public void verifyCanDisable() {
+        readLock.lock();
+        try {
+            if (getScheduledState() != ScheduledState.STOPPED) {
+                throw new IllegalStateException(this + " is not stopped");
+            }
+            verifyNoActiveThreads();
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    private void verifyNoActiveThreads() throws IllegalStateException {
+        final int threadCount = processScheduler.getActiveThreadCount(this);
+        if (threadCount > 0) {
+            throw new IllegalStateException(this + " has " + threadCount + " threads still active");
+        }
+    }
+
+    @Override
+    public void verifyModifiable() throws IllegalStateException {
+        if (isRunning()) {
+            throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+        }
+    }
 }
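
For readers tracing the yield contract above: a processor requests a yield through its
ProcessContext, and the node-level yield() shown here is what records the expiration and
notifies the scheduler. A minimal sketch of the processor side, assuming only the standard
NiFi processor API (FetchRemote and remoteIsReachable are illustrative names, not part of
this commit):

    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.exception.ProcessException;

    public class FetchRemote extends AbstractProcessor {
        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            if (!remoteIsReachable()) {
                // ask the framework not to schedule this processor again until the
                // configured Yield Duration elapses; this lands in the node's yield()
                context.yield();
                return;
            }
            // ... fetch data and transfer FlowFiles here ...
        }

        // placeholder check; a real implementation would probe the remote system
        private boolean remoteIsReachable() {
            return true;
        }
    }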


[15/17] nifi git commit: NIFI-810: rebased from master

Posted by ma...@apache.org.
NIFI-810: rebased from master


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/b974445d
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/b974445d
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/b974445d

Branch: refs/heads/NIFI-810-InputRequirement
Commit: b974445ddd38ec7e84995225b86987e6af1af52c
Parents: 5ecdb18 2215bc8
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Oct 7 17:28:39 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:28:39 2015 -0400

----------------------------------------------------------------------
 .../nifi/controller/StandardProcessorNode.java    | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)
----------------------------------------------------------------------



[16/17] nifi git commit: NIFI-810: Addressed several checkstyle violations

Posted by ma...@apache.org.
NIFI-810: Addressed several checkstyle violations


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/ccfb57fe
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/ccfb57fe
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/ccfb57fe

Branch: refs/heads/NIFI-810-InputRequirement
Commit: ccfb57fe9ff43f11319dcb1625bfc78b1d88f56a
Parents: b974445
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Oct 7 17:48:51 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:48:51 2015 -0400

----------------------------------------------------------------------
 .../annotation/behavior/InputRequirement.java   |  70 +-
 .../nifi/processors/aws/s3/PutS3Object.java     |  46 +-
 .../apache/nifi/controller/ProcessorNode.java   |  88 +--
 .../nifi/controller/StandardProcessorNode.java  |  10 +-
 .../standard/Base64EncodeContent.java           | 168 ++---
 .../nifi/processors/standard/ControlRate.java   | 672 +++++++++----------
 6 files changed, 534 insertions(+), 520 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
----------------------------------------------------------------------
diff --git a/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
index 97e6b88..13f442c 100644
--- a/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
+++ b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.nifi.annotation.behavior;
 
 import java.lang.annotation.Documented;
@@ -21,31 +37,31 @@ import java.lang.annotation.Target;
 @Retention(RetentionPolicy.RUNTIME)
 @Inherited
 public @interface InputRequirement {
-	Requirement value();
-	
-	public static enum Requirement {
-		/**
-		 * This value is used to indicate that the Processor requires input from other Processors
-		 * in order to run. As a result, the Processor will not be valid if it does not have any
-		 * incoming connections.
-		 */
-		INPUT_REQUIRED,
-		
-		/**
-		 * This value is used to indicate that the Processor will consume data from an incoming
-		 * connection but does not require an incoming connection in order to perform its task.
-		 * If the {@link InputRequirement} annotation is not present, this is the default value
-		 * that is used.
-		 */
-		INPUT_ALLOWED,
-		
-		/**
-		 * This value is used to indicate that the Processor is a "Source Processor" and does
-		 * not accept incoming connections. Because the Processor does not pull FlowFiles from
-		 * an incoming connection, it can be very confusing for users who create incoming connections
-		 * to the Processor. As a result, this value can be used in order to clarify that incoming
-		 * connections will not be used. This prevents the user from even creating such a connection.
-		 */
-		INPUT_FORBIDDEN;
-	}
+    Requirement value();
+
+    public static enum Requirement {
+        /**
+         * This value is used to indicate that the Processor requires input from other Processors
+         * in order to run. As a result, the Processor will not be valid if it does not have any
+         * incoming connections.
+         */
+        INPUT_REQUIRED,
+
+        /**
+         * This value is used to indicate that the Processor will consume data from an incoming
+         * connection but does not require an incoming connection in order to perform its task.
+         * If the {@link InputRequirement} annotation is not present, this is the default value
+         * that is used.
+         */
+        INPUT_ALLOWED,
+
+        /**
+         * This value is used to indicate that the Processor is a "Source Processor" and does
+         * not accept incoming connections. Because the Processor does not pull FlowFiles from
+         * an incoming connection, it can be very confusing for users who create incoming connections
+         * to the Processor. As a result, this value can be used in order to clarify that incoming
+         * connections will not be used. This prevents the user from even creating such a connection.
+         */
+        INPUT_FORBIDDEN;
+    }
 }
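
The javadoc above also fixes the default: INPUT_ALLOWED applies when the annotation is
absent. Because the annotation has runtime retention, framework code can read the effective
requirement reflectively, in the same style as getProcessorDescription() in
StandardProcessorNode; a minimal sketch (InputRequirementUtil is a hypothetical helper, not
part of this commit):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.processor.Processor;

    public final class InputRequirementUtil {
        private InputRequirementUtil() {
        }

        /**
         * @return the processor's declared input requirement, or INPUT_ALLOWED
         *         when no annotation is present (the documented default)
         */
        public static Requirement getInputRequirement(final Processor processor) {
            final InputRequirement annotation = processor.getClass().getAnnotation(InputRequirement.class);
            return (annotation == null) ? Requirement.INPUT_ALLOWED : annotation.value();
        }
    }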

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
index 7398c4e..c7212f5 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
@@ -59,10 +59,8 @@ import com.amazonaws.services.s3.model.StorageClass;
 @InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "S3", "AWS", "Archive", "Put"})
 @CapabilityDescription("Puts FlowFiles to an Amazon S3 Bucket")
-@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
-        value = "The value of a User-Defined Metadata field to add to the S3 Object",
-        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
-        supportsExpressionLanguage = true)
+@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object", value = "The value of a User-Defined Metadata field to add to the S3 Object",
+    description = "Allows user-defined metadata to be added to the S3 object as key/value pairs", supportsExpressionLanguage = true)
 @ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
 @WritesAttributes({
     @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object that was put to S3"),
@@ -72,22 +70,22 @@ import com.amazonaws.services.s3.model.StorageClass;
 public class PutS3Object extends AbstractS3Processor {
 
     public static final PropertyDescriptor EXPIRATION_RULE_ID = new PropertyDescriptor.Builder()
-            .name("Expiration Time Rule")
-            .required(false)
-            .expressionLanguageSupported(true)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .build();
+        .name("Expiration Time Rule")
+        .required(false)
+        .expressionLanguageSupported(true)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+        .build();
 
     public static final PropertyDescriptor STORAGE_CLASS = new PropertyDescriptor.Builder()
-            .name("Storage Class")
-            .required(true)
-            .allowableValues(StorageClass.Standard.name(), StorageClass.ReducedRedundancy.name())
-            .defaultValue(StorageClass.Standard.name())
-            .build();
+        .name("Storage Class")
+        .required(true)
+        .allowableValues(StorageClass.Standard.name(), StorageClass.ReducedRedundancy.name())
+        .defaultValue(StorageClass.Standard.name())
+        .build();
 
     public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
-            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, STORAGE_CLASS, REGION, TIMEOUT, EXPIRATION_RULE_ID,
-                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
+        Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, STORAGE_CLASS, REGION, TIMEOUT, EXPIRATION_RULE_ID,
+            FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
@@ -97,15 +95,15 @@ public class PutS3Object extends AbstractS3Processor {
     @Override
     protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
         return new PropertyDescriptor.Builder()
-                .name(propertyDescriptorName)
-                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-                .expressionLanguageSupported(true)
-                .dynamic(true)
-                .build();
+            .name(propertyDescriptorName)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+            .expressionLanguageSupported(true)
+            .dynamic(true)
+            .build();
     }
 
     @Override
-	public void onTrigger(final ProcessContext context, final ProcessSession session) {
+    public void onTrigger(final ProcessContext context, final ProcessSession session) {
         FlowFile flowFile = session.get();
         if (flowFile == null) {
             return;
@@ -176,9 +174,9 @@ public class PutS3Object extends AbstractS3Processor {
             final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
             session.getProvenanceReporter().send(flowFile, url, millis);
 
-            getLogger().info("Successfully put {} to Amazon S3 in {} milliseconds", new Object[]{ff, millis});
+            getLogger().info("Successfully put {} to Amazon S3 in {} milliseconds", new Object[] {ff, millis});
         } catch (final ProcessException | AmazonClientException pe) {
-            getLogger().error("Failed to put {} to Amazon S3 due to {}", new Object[]{flowFile, pe});
+            getLogger().error("Failed to put {} to Amazon S3 due to {}", new Object[] {flowFile, pe});
             session.transfer(flowFile, REL_FAILURE);
         }
     }
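
The @DynamicProperty declaration above is what lets users attach arbitrary key/value
metadata to the uploaded object: each dynamic property is evaluated against the FlowFile
and copied into the S3 ObjectMetadata. A rough sketch of that pattern, assuming the
standard NiFi and AWS SDK APIs (UserMetadataSupport is an illustrative helper; the actual
loop inside PutS3Object is not shown in this diff):

    import java.util.Map;

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.ProcessContext;

    import com.amazonaws.services.s3.model.ObjectMetadata;

    final class UserMetadataSupport {
        private UserMetadataSupport() {
        }

        /** Copies every user-defined (dynamic) property onto the S3 object as user metadata. */
        static void applyUserMetadata(final ProcessContext context, final FlowFile flowFile, final ObjectMetadata objectMetadata) {
            for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
                if (entry.getKey().isDynamic()) {
                    final String value = context.getProperty(entry.getKey()).evaluateAttributeExpressions(flowFile).getValue();
                    objectMetadata.addUserMetadata(entry.getKey().getName(), value);
                }
            }
        }
    }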

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
index 2f72d0f..d340c77 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
@@ -31,72 +31,72 @@ import org.apache.nifi.scheduling.SchedulingStrategy;
 
 public abstract class ProcessorNode extends AbstractConfiguredComponent implements Connectable {
 
-	public ProcessorNode(final Processor processor, final String id,
-		final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
-		super(processor, id, validationContextFactory, serviceProvider);
-	}
+    public ProcessorNode(final Processor processor, final String id,
+        final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
+        super(processor, id, validationContextFactory, serviceProvider);
+    }
 
-	public abstract boolean isIsolated();
+    public abstract boolean isIsolated();
 
-	public abstract boolean isTriggerWhenAnyDestinationAvailable();
+    public abstract boolean isTriggerWhenAnyDestinationAvailable();
 
-	@Override
-	public abstract boolean isSideEffectFree();
+    @Override
+    public abstract boolean isSideEffectFree();
 
-	public abstract boolean isTriggeredSerially();
+    public abstract boolean isTriggeredSerially();
 
-	public abstract boolean isEventDrivenSupported();
+    public abstract boolean isEventDrivenSupported();
 
-	public abstract boolean isHighThroughputSupported();
+    public abstract boolean isHighThroughputSupported();
 
-	public abstract Requirement getInputRequirement();
+    public abstract Requirement getInputRequirement();
 
-	@Override
-	public abstract boolean isValid();
+    @Override
+    public abstract boolean isValid();
 
-	public abstract void setScheduledState(ScheduledState scheduledState);
+    public abstract void setScheduledState(ScheduledState scheduledState);
 
-	public abstract void setBulletinLevel(LogLevel bulletinLevel);
+    public abstract void setBulletinLevel(LogLevel bulletinLevel);
 
-	public abstract LogLevel getBulletinLevel();
+    public abstract LogLevel getBulletinLevel();
 
-	public abstract Processor getProcessor();
+    public abstract Processor getProcessor();
 
-	public abstract void yield(long period, TimeUnit timeUnit);
+    public abstract void yield(long period, TimeUnit timeUnit);
 
-	public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
+    public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
 
-	public abstract Set<Relationship> getAutoTerminatedRelationships();
+    public abstract Set<Relationship> getAutoTerminatedRelationships();
 
-	public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
+    public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
 
-	@Override
-	public abstract SchedulingStrategy getSchedulingStrategy();
+    @Override
+    public abstract SchedulingStrategy getSchedulingStrategy();
 
-	public abstract void setRunDuration(long duration, TimeUnit timeUnit);
+    public abstract void setRunDuration(long duration, TimeUnit timeUnit);
 
-	public abstract long getRunDuration(TimeUnit timeUnit);
+    public abstract long getRunDuration(TimeUnit timeUnit);
 
-	public abstract Map<String, String> getStyle();
+    public abstract Map<String, String> getStyle();
 
-	public abstract void setStyle(Map<String, String> style);
+    public abstract void setStyle(Map<String, String> style);
 
-	/**
-	 * @return the number of threads (concurrent tasks) currently being used by
-	 * this Processor
-	 */
-	public abstract int getActiveThreadCount();
+    /**
+     * @return the number of threads (concurrent tasks) currently being used by
+     *         this Processor
+     */
+    public abstract int getActiveThreadCount();
 
-	/**
-	 * Verifies that this Processor can be started if the provided set of
-	 * services are enabled. This is introduced because we need to verify that
-	 * all components can be started before starting any of them. In order to do
-	 * that, we need to know that this component can be started if the given
-	 * services are enabled, as we will then enable the given services before
-	 * starting this component.
-	 *
-	 * @param ignoredReferences to ignore
-	 */
-	public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
+    /**
+     * Verifies that this Processor can be started if the provided set of
+     * services are enabled. This is introduced because we need to verify that
+     * all components can be started before starting any of them. In order to do
+     * that, we need to know that this component can be started if the given
+     * services are enabled, as we will then enable the given services before
+     * starting this component.
+     *
+     * @param ignoredReferences to ignore
+     */
+    public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
 
 }
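
The verifyCanStart(Set&lt;ControllerServiceNode&gt;) contract documented above is meant to be
driven by whatever orchestrates a bulk start: verify every component first, then enable
services, then start processors, so a failure leaves the flow untouched. A rough sketch of
that call pattern (StartupCheck is a hypothetical helper, not NiFi API):

    import java.util.Collection;
    import java.util.Set;

    import org.apache.nifi.controller.ProcessorNode;
    import org.apache.nifi.controller.service.ControllerServiceNode;

    final class StartupCheck {
        private StartupCheck() {
        }

        /**
         * Verifies every processor before any component is started; the services
         * in servicesToEnable are treated as if they were already enabled.
         */
        static void verifyAll(final Collection<ProcessorNode> processors, final Set<ControllerServiceNode> servicesToEnable) {
            for (final ProcessorNode processor : processors) {
                // throws IllegalStateException on the first node that could not start
                processor.verifyCanStart(servicesToEnable);
            }
        }
    }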

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index f69c510..ad22c6d 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -1306,9 +1306,9 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
     }
 
     @Override
-	public void verifyModifiable() throws IllegalStateException {
-		if (isRunning()) {
-			throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-		}
-	}>>>>>>>2215 bc848b7db395b2ca9ac7cc4dc10891393721
+    public void verifyModifiable() throws IllegalStateException {
+        if (isRunning()) {
+            throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+        }
+    }
 }

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
index 816b407..db45109 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
@@ -56,99 +56,99 @@ import org.apache.nifi.util.StopWatch;
 @InputRequirement(Requirement.INPUT_REQUIRED)
 public class Base64EncodeContent extends AbstractProcessor {
 
-	public static final String ENCODE_MODE = "Encode";
-	public static final String DECODE_MODE = "Decode";
+    public static final String ENCODE_MODE = "Encode";
+    public static final String DECODE_MODE = "Decode";
 
-	public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
-		.name("Mode")
-		.description("Specifies whether the content should be encoded or decoded")
-		.required(true)
-		.allowableValues(ENCODE_MODE, DECODE_MODE)
-		.defaultValue(ENCODE_MODE)
-		.build();
-	public static final Relationship REL_SUCCESS = new Relationship.Builder()
-		.name("success")
-		.description("Any FlowFile that is successfully encoded or decoded will be routed to success")
-		.build();
-	public static final Relationship REL_FAILURE = new Relationship.Builder()
-		.name("failure")
-		.description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
-		.build();
+    public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
+        .name("Mode")
+        .description("Specifies whether the content should be encoded or decoded")
+        .required(true)
+        .allowableValues(ENCODE_MODE, DECODE_MODE)
+        .defaultValue(ENCODE_MODE)
+        .build();
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+        .name("success")
+        .description("Any FlowFile that is successfully encoded or decoded will be routed to success")
+        .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+        .name("failure")
+        .description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
+        .build();
 
-	private List<PropertyDescriptor> properties;
-	private Set<Relationship> relationships;
+    private List<PropertyDescriptor> properties;
+    private Set<Relationship> relationships;
 
-	@Override
-	protected void init(final ProcessorInitializationContext context) {
-		final List<PropertyDescriptor> properties = new ArrayList<>();
-		properties.add(MODE);
-		this.properties = Collections.unmodifiableList(properties);
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        final List<PropertyDescriptor> properties = new ArrayList<>();
+        properties.add(MODE);
+        this.properties = Collections.unmodifiableList(properties);
 
-		final Set<Relationship> relationships = new HashSet<>();
-		relationships.add(REL_SUCCESS);
-		relationships.add(REL_FAILURE);
-		this.relationships = Collections.unmodifiableSet(relationships);
-	}
+        final Set<Relationship> relationships = new HashSet<>();
+        relationships.add(REL_SUCCESS);
+        relationships.add(REL_FAILURE);
+        this.relationships = Collections.unmodifiableSet(relationships);
+    }
 
-	@Override
-	public Set<Relationship> getRelationships() {
-		return relationships;
-	}
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
 
-	@Override
-	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-		return properties;
-	}
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return properties;
+    }
 
-	@Override
-	public void onTrigger(final ProcessContext context, final ProcessSession session) {
-		FlowFile flowFile = session.get();
-		if (flowFile == null) {
-			return;
-		}
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSession session) {
+        FlowFile flowFile = session.get();
+        if (flowFile == null) {
+            return;
+        }
 
-		final ProcessorLog logger = getLogger();
+        final ProcessorLog logger = getLogger();
 
-		boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
-		try {
-			final StopWatch stopWatch = new StopWatch(true);
-			if (encode) {
-				flowFile = session.write(flowFile, new StreamCallback() {
-					@Override
-					public void process(InputStream in, OutputStream out) throws IOException {
-						try (Base64OutputStream bos = new Base64OutputStream(out)) {
-							int len = -1;
-							byte[] buf = new byte[8192];
-							while ((len = in.read(buf)) > 0) {
-								bos.write(buf, 0, len);
-							}
-							bos.flush();
-						}
-					}
-				});
-			} else {
-				flowFile = session.write(flowFile, new StreamCallback() {
-					@Override
-					public void process(InputStream in, OutputStream out) throws IOException {
-						try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
-							int len = -1;
-							byte[] buf = new byte[8192];
-							while ((len = bis.read(buf)) > 0) {
-								out.write(buf, 0, len);
-							}
-							out.flush();
-						}
-					}
-				});
-			}
+        boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
+        try {
+            final StopWatch stopWatch = new StopWatch(true);
+            if (encode) {
+                flowFile = session.write(flowFile, new StreamCallback() {
+                    @Override
+                    public void process(InputStream in, OutputStream out) throws IOException {
+                        try (Base64OutputStream bos = new Base64OutputStream(out)) {
+                            int len = -1;
+                            byte[] buf = new byte[8192];
+                            while ((len = in.read(buf)) > 0) {
+                                bos.write(buf, 0, len);
+                            }
+                            bos.flush();
+                        }
+                    }
+                });
+            } else {
+                flowFile = session.write(flowFile, new StreamCallback() {
+                    @Override
+                    public void process(InputStream in, OutputStream out) throws IOException {
+                        try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
+                            int len = -1;
+                            byte[] buf = new byte[8192];
+                            while ((len = bis.read(buf)) > 0) {
+                                out.write(buf, 0, len);
+                            }
+                            out.flush();
+                        }
+                    }
+                });
+            }
 
-			logger.info("Successfully {} {}", new Object[]{encode ? "encoded" : "decoded", flowFile});
-			session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
-			session.transfer(flowFile, REL_SUCCESS);
-		} catch (ProcessException e) {
-			logger.error("Failed to {} {} due to {}", new Object[]{encode ? "encode" : "decode", flowFile, e});
-			session.transfer(flowFile, REL_FAILURE);
-		}
-	}
+            logger.info("Successfully {} {}", new Object[] {encode ? "encoded" : "decoded", flowFile});
+            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+            session.transfer(flowFile, REL_SUCCESS);
+        } catch (ProcessException e) {
+            logger.error("Failed to {} {} due to {}", new Object[] {encode ? "encode" : "decode", flowFile, e});
+            session.transfer(flowFile, REL_FAILURE);
+        }
+    }
 
 }
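
The hunk above is an indentation-only cleanup (tabs to four-space indents) of the Base64 encode/decode processor; its behavior is unchanged. For readers following the code, the encode/decode round trip can be exercised with the nifi-mock TestRunner along these lines. This is a sketch, not part of the commit, and it assumes the class being reformatted is Base64EncodeContent (the class declaration falls outside the quoted hunk):

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.processors.standard.Base64EncodeContent; // assumed class/package
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;

    public class Base64RoundTripSketch {
        public static void main(String[] args) {
            // Encode "hello, base64" and capture the resulting content.
            TestRunner encode = TestRunners.newTestRunner(new Base64EncodeContent());
            encode.setProperty(Base64EncodeContent.MODE, Base64EncodeContent.ENCODE_MODE);
            encode.enqueue("hello, base64".getBytes(StandardCharsets.UTF_8));
            encode.run();
            encode.assertAllFlowFilesTransferred(Base64EncodeContent.REL_SUCCESS, 1);
            byte[] encoded = encode.getFlowFilesForRelationship(Base64EncodeContent.REL_SUCCESS)
                .get(0).toByteArray();

            // Feed the encoded bytes back through in Decode mode and verify the round trip.
            TestRunner decode = TestRunners.newTestRunner(new Base64EncodeContent());
            decode.setProperty(Base64EncodeContent.MODE, Base64EncodeContent.DECODE_MODE);
            decode.enqueue(encoded);
            decode.run();
            decode.assertAllFlowFilesTransferred(Base64EncodeContent.REL_SUCCESS, 1);
            decode.getFlowFilesForRelationship(Base64EncodeContent.REL_SUCCESS)
                .get(0).assertContentEquals("hello, base64");
        }
    }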

http://git-wip-us.apache.org/repos/asf/nifi/blob/ccfb57fe/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
index a45c211..0847472 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
@@ -61,340 +61,340 @@ import org.apache.nifi.util.timebuffer.TimedBuffer;
 @CapabilityDescription("Controls the rate at which data is transferred to follow-on processors.")
 public class ControlRate extends AbstractProcessor {
 
-	public static final String DATA_RATE = "data rate";
-	public static final String FLOWFILE_RATE = "flowfile count";
-	public static final String ATTRIBUTE_RATE = "attribute value";
-
-	public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
-		.name("Rate Control Criteria")
-		.description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
-		.required(true)
-		.allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
-		.defaultValue(DATA_RATE)
-		.build();
-	public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
-		.name("Maximum Rate")
-		.description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
-			+ "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
-		.required(true)
-		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
-		.build();
-	public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-		.name("Rate Controlled Attribute")
-		.description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
-			+ "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
-			+ "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
-		.required(false)
-		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-		.expressionLanguageSupported(false)
-		.build();
-	public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
-		.name("Time Duration")
-		.description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
-		.required(true)
-		.addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
-		.defaultValue("1 min")
-		.build();
-	public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-		.name("Grouping Attribute")
-		.description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
-			+ "each value specified by the attribute with this name. Changing this value resets the rate counters.")
-		.required(false)
-		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-		.expressionLanguageSupported(false)
-		.build();
-
-	public static final Relationship REL_SUCCESS = new Relationship.Builder()
-		.name("success")
-		.description("All FlowFiles are transferred to this relationship")
-		.build();
-	public static final Relationship REL_FAILURE = new Relationship.Builder()
-		.name("failure")
-		.description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
-		.build();
-
-	private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
-	private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
-
-	private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
-	private List<PropertyDescriptor> properties;
-	private Set<Relationship> relationships;
-	private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
-
-	@Override
-	protected void init(final ProcessorInitializationContext context) {
-		final List<PropertyDescriptor> properties = new ArrayList<>();
-		properties.add(RATE_CONTROL_CRITERIA);
-		properties.add(MAX_RATE);
-		properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
-		properties.add(TIME_PERIOD);
-		properties.add(GROUPING_ATTRIBUTE_NAME);
-		this.properties = Collections.unmodifiableList(properties);
-
-		final Set<Relationship> relationships = new HashSet<>();
-		relationships.add(REL_SUCCESS);
-		this.relationships = Collections.unmodifiableSet(relationships);
-	}
-
-	@Override
-	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-		return properties;
-	}
-
-	@Override
-	public Set<Relationship> getRelationships() {
-		return relationships;
-	}
-
-	@Override
-	protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-		final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
-
-		final Validator rateValidator;
-		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-			case DATA_RATE:
-				rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
-				break;
-			case ATTRIBUTE_RATE:
-				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-				final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-				if (rateAttr == null) {
-					validationResults.add(new ValidationResult.Builder()
-						.subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
-						.explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
-						.build());
-				}
-				break;
-			case FLOWFILE_RATE:
-			default:
-				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-				break;
-		}
-
-		final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
-		if (!rateResult.isValid()) {
-			validationResults.add(rateResult);
-		}
-
-		return validationResults;
-	}
-
-	@Override
-	public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
-		super.onPropertyModified(descriptor, oldValue, newValue);
-
-		if (descriptor.equals(RATE_CONTROL_CRITERIA)
-			|| descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
-			|| descriptor.equals(GROUPING_ATTRIBUTE_NAME)
-			|| descriptor.equals(TIME_PERIOD)) {
-			// if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
-			throttleMap.clear();
-		} else if (descriptor.equals(MAX_RATE)) {
-			final long newRate;
-			if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
-				newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
-			} else {
-				newRate = Long.parseLong(newValue);
-			}
-
-			for (final Throttle throttle : throttleMap.values()) {
-				throttle.setMaxRate(newRate);
-			}
-		}
-	}
-
-	@Override
-	public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
-		final long lastClearTime = lastThrottleClearTime.get();
-		final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
-		if (lastClearTime < throttleExpirationMillis) {
-			if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
-				final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
-				while (itr.hasNext()) {
-					final Map.Entry<String, Throttle> entry = itr.next();
-					final Throttle throttle = entry.getValue();
-					if (throttle.tryLock()) {
-						try {
-							if (throttle.lastUpdateTime() < lastClearTime) {
-								itr.remove();
-							}
-						} finally {
-							throttle.unlock();
-						}
-					}
-				}
-			}
-		}
-
-		// TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
-		FlowFile flowFile = session.get();
-		if (flowFile == null) {
-			return;
-		}
-
-		final ProcessorLog logger = getLogger();
-		final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
-		final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-		long rateValue;
-		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-			case DATA_RATE:
-				rateValue = flowFile.getSize();
-				break;
-			case FLOWFILE_RATE:
-				rateValue = 1;
-				break;
-			case ATTRIBUTE_RATE:
-				final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
-				if (attributeValue == null) {
-					logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[]{flowFile, rateControlAttributeName});
-					session.transfer(flowFile, REL_FAILURE);
-					return;
-				}
-
-				if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
-					logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
-						new Object[]{flowFile, rateControlAttributeName, attributeValue});
-					session.transfer(flowFile, REL_FAILURE);
-					return;
-				}
-				rateValue = Long.parseLong(attributeValue);
-				break;
-			default:
-				throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
-		}
-
-		final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
-		final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
-		Throttle throttle = throttleMap.get(groupName);
-		if (throttle == null) {
-			throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
-
-			final String maxRateValue = context.getProperty(MAX_RATE).getValue();
-			final long newRate;
-			if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
-				newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
-			} else {
-				newRate = Long.parseLong(maxRateValue);
-			}
-			throttle.setMaxRate(newRate);
-
-			throttleMap.put(groupName, throttle);
-		}
-
-		throttle.lock();
-		try {
-			if (throttle.tryAdd(rateValue)) {
-				logger.info("transferring {} to 'success'", new Object[]{flowFile});
-				session.transfer(flowFile, REL_SUCCESS);
-			} else {
-				flowFile = session.penalize(flowFile);
-				session.transfer(flowFile);
-			}
-		} finally {
-			throttle.unlock();
-		}
-	}
-
-	private static class TimestampedLong {
-
-		private final Long value;
-		private final long timestamp = System.currentTimeMillis();
-
-		public TimestampedLong(final Long value) {
-			this.value = value;
-		}
-
-		public Long getValue() {
-			return value;
-		}
-
-		public long getTimestamp() {
-			return timestamp;
-		}
-	}
-
-	private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
-
-		@Override
-		public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
-			if (oldValue == null && toAdd == null) {
-				return new TimestampedLong(0L);
-			} else if (oldValue == null) {
-				return toAdd;
-			} else if (toAdd == null) {
-				return oldValue;
-			}
-
-			return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
-		}
-
-		@Override
-		public TimestampedLong createNew() {
-			return new TimestampedLong(0L);
-		}
-
-		@Override
-		public long getTimestamp(TimestampedLong entity) {
-			return entity == null ? 0L : entity.getTimestamp();
-		}
-	}
-
-	private static class Throttle extends ReentrantLock {
-
-		private final AtomicLong maxRate = new AtomicLong(1L);
-		private final long timePeriodValue;
-		private final TimeUnit timePeriodUnit;
-		private final TimedBuffer<TimestampedLong> timedBuffer;
-		private final ProcessorLog logger;
-
-		private volatile long penalizationExpired;
-		private volatile long lastUpdateTime;
-
-		public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
-			this.timePeriodUnit = unit;
-			this.timePeriodValue = timePeriod;
-			this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
-			this.logger = logger;
-		}
-
-		public void setMaxRate(final long maxRate) {
-			this.maxRate.set(maxRate);
-		}
-
-		public long lastUpdateTime() {
-			return lastUpdateTime;
-		}
-
-		public boolean tryAdd(final long value) {
-			final long now = System.currentTimeMillis();
-			if (penalizationExpired > now) {
-				return false;
-			}
-
-			final long maxRateValue = maxRate.get();
-
-			final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
-			if (sum != null && sum.getValue() >= maxRateValue) {
-				logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[]{sum.getValue(), value});
-				return false;
-			}
-
-			logger.debug("current sum for throttle is {}, so allowing rate of {} through",
-				new Object[]{sum == null ? 0 : sum.getValue(), value});
-
-			final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
-			if (transferred > maxRateValue) {
-				final long amountOver = transferred - maxRateValue;
-				// determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
-				final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
-				final double pct = (double) amountOver / (double) maxRateValue;
-				final long penalizationPeriod = (long) (milliDuration * pct);
-				this.penalizationExpired = now + penalizationPeriod;
-				logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[]{value, penalizationPeriod});
-			}
-
-			lastUpdateTime = now;
-			return true;
-		}
-	}
+    public static final String DATA_RATE = "data rate";
+    public static final String FLOWFILE_RATE = "flowfile count";
+    public static final String ATTRIBUTE_RATE = "attribute value";
+
+    public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
+        .name("Rate Control Criteria")
+        .description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
+        .required(true)
+        .allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
+        .defaultValue(DATA_RATE)
+        .build();
+    public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
+        .name("Maximum Rate")
+        .description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
+            + "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
+        .required(true)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
+        .build();
+    public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+        .name("Rate Controlled Attribute")
+        .description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
+            + "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
+            + "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
+        .required(false)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+        .expressionLanguageSupported(false)
+        .build();
+    public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
+        .name("Time Duration")
+        .description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
+        .required(true)
+        .addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
+        .defaultValue("1 min")
+        .build();
+    public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+        .name("Grouping Attribute")
+        .description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
+            + "each value specified by the attribute with this name. Changing this value resets the rate counters.")
+        .required(false)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+        .expressionLanguageSupported(false)
+        .build();
+
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+        .name("success")
+        .description("All FlowFiles are transferred to this relationship")
+        .build();
+    public static final Relationship REL_FAILURE = new Relationship.Builder()
+        .name("failure")
+        .description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
+        .build();
+
+    private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
+    private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
+
+    private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
+    private List<PropertyDescriptor> properties;
+    private Set<Relationship> relationships;
+    private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        final List<PropertyDescriptor> properties = new ArrayList<>();
+        properties.add(RATE_CONTROL_CRITERIA);
+        properties.add(MAX_RATE);
+        properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
+        properties.add(TIME_PERIOD);
+        properties.add(GROUPING_ATTRIBUTE_NAME);
+        this.properties = Collections.unmodifiableList(properties);
+
+        final Set<Relationship> relationships = new HashSet<>();
+        relationships.add(REL_SUCCESS);
+        relationships.add(REL_FAILURE); // onTrigger routes invalid FlowFiles to failure, so it must be registered
+        this.relationships = Collections.unmodifiableSet(relationships);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return properties;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
+    @Override
+    protected Collection<ValidationResult> customValidate(final ValidationContext context) {
+        final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
+
+        final Validator rateValidator;
+        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+            case DATA_RATE:
+                rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
+                break;
+            case ATTRIBUTE_RATE:
+                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+                final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+                if (rateAttr == null) {
+                    validationResults.add(new ValidationResult.Builder()
+                        .subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
+                        .explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
+                        .build());
+                }
+                break;
+            case FLOWFILE_RATE:
+            default:
+                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+                break;
+        }
+
+        final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
+        if (!rateResult.isValid()) {
+            validationResults.add(rateResult);
+        }
+
+        return validationResults;
+    }
+
+    @Override
+    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
+        super.onPropertyModified(descriptor, oldValue, newValue);
+
+        if (descriptor.equals(RATE_CONTROL_CRITERIA)
+            || descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
+            || descriptor.equals(GROUPING_ATTRIBUTE_NAME)
+            || descriptor.equals(TIME_PERIOD)) {
+            // if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
+            throttleMap.clear();
+        } else if (descriptor.equals(MAX_RATE)) {
+            final long newRate;
+            if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
+                newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
+            } else {
+                newRate = Long.parseLong(newValue);
+            }
+
+            for (final Throttle throttle : throttleMap.values()) {
+                throttle.setMaxRate(newRate);
+            }
+        }
+    }
+
+    @Override
+    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
+        final long lastClearTime = lastThrottleClearTime.get();
+        final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
+        if (lastClearTime < throttleExpirationMillis) {
+            if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
+                final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
+                while (itr.hasNext()) {
+                    final Map.Entry<String, Throttle> entry = itr.next();
+                    final Throttle throttle = entry.getValue();
+                    if (throttle.tryLock()) {
+                        try {
+                            if (throttle.lastUpdateTime() < lastClearTime) {
+                                itr.remove();
+                            }
+                        } finally {
+                            throttle.unlock();
+                        }
+                    }
+                }
+            }
+        }
+
+        // the loop above clears any Throttle that has not been used in more than 2 throttling periods
+        FlowFile flowFile = session.get();
+        if (flowFile == null) {
+            return;
+        }
+
+        final ProcessorLog logger = getLogger();
+        final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
+        final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+        long rateValue;
+        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+            case DATA_RATE:
+                rateValue = flowFile.getSize();
+                break;
+            case FLOWFILE_RATE:
+                rateValue = 1;
+                break;
+            case ATTRIBUTE_RATE:
+                final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
+                if (attributeValue == null) {
+                    logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[] {flowFile, rateControlAttributeName});
+                    session.transfer(flowFile, REL_FAILURE);
+                    return;
+                }
+
+                if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
+                    logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
+                        new Object[] {flowFile, rateControlAttributeName, attributeValue});
+                    session.transfer(flowFile, REL_FAILURE);
+                    return;
+                }
+                rateValue = Long.parseLong(attributeValue);
+                break;
+            default:
+                throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
+        }
+
+        final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
+        final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
+        Throttle throttle = throttleMap.get(groupName);
+        if (throttle == null) {
+            throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
+
+            final String maxRateValue = context.getProperty(MAX_RATE).getValue();
+            final long newRate;
+            if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
+                newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
+            } else {
+                newRate = Long.parseLong(maxRateValue);
+            }
+            throttle.setMaxRate(newRate);
+
+            throttleMap.put(groupName, throttle);
+        }
+
+        throttle.lock();
+        try {
+            if (throttle.tryAdd(rateValue)) {
+                logger.info("transferring {} to 'success'", new Object[] {flowFile});
+                session.transfer(flowFile, REL_SUCCESS);
+            } else {
+                flowFile = session.penalize(flowFile);
+                session.transfer(flowFile);
+            }
+        } finally {
+            throttle.unlock();
+        }
+    }
+
+    private static class TimestampedLong {
+
+        private final Long value;
+        private final long timestamp = System.currentTimeMillis();
+
+        public TimestampedLong(final Long value) {
+            this.value = value;
+        }
+
+        public Long getValue() {
+            return value;
+        }
+
+        public long getTimestamp() {
+            return timestamp;
+        }
+    }
+
+    private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
+
+        @Override
+        public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
+            if (oldValue == null && toAdd == null) {
+                return new TimestampedLong(0L);
+            } else if (oldValue == null) {
+                return toAdd;
+            } else if (toAdd == null) {
+                return oldValue;
+            }
+
+            return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
+        }
+
+        @Override
+        public TimestampedLong createNew() {
+            return new TimestampedLong(0L);
+        }
+
+        @Override
+        public long getTimestamp(TimestampedLong entity) {
+            return entity == null ? 0L : entity.getTimestamp();
+        }
+    }
+
+    private static class Throttle extends ReentrantLock {
+
+        private final AtomicLong maxRate = new AtomicLong(1L);
+        private final long timePeriodValue;
+        private final TimeUnit timePeriodUnit;
+        private final TimedBuffer<TimestampedLong> timedBuffer;
+        private final ProcessorLog logger;
+
+        private volatile long penalizationExpired;
+        private volatile long lastUpdateTime;
+
+        public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
+            this.timePeriodUnit = unit;
+            this.timePeriodValue = timePeriod;
+            this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
+            this.logger = logger;
+        }
+
+        public void setMaxRate(final long maxRate) {
+            this.maxRate.set(maxRate);
+        }
+
+        public long lastUpdateTime() {
+            return lastUpdateTime;
+        }
+
+        public boolean tryAdd(final long value) {
+            final long now = System.currentTimeMillis();
+            if (penalizationExpired > now) {
+                return false;
+            }
+
+            final long maxRateValue = maxRate.get();
+
+            final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
+            if (sum != null && sum.getValue() >= maxRateValue) {
+                logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[] {sum.getValue(), value});
+                return false;
+            }
+
+            logger.debug("current sum for throttle is {}, so allowing rate of {} through",
+                new Object[] {sum == null ? 0 : sum.getValue(), value});
+
+            final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
+            if (transferred > maxRateValue) {
+                final long amountOver = transferred - maxRateValue;
+                // determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
+                final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
+                final double pct = (double) amountOver / (double) maxRateValue;
+                final long penalizationPeriod = (long) (milliDuration * pct);
+                this.penalizationExpired = now + penalizationPeriod;
+                logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[] {value, penalizationPeriod});
+            }
+
+            lastUpdateTime = now;
+            return true;
+        }
+    }
 }
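
This ControlRate hunk is likewise an indentation-only cleanup. The one piece of logic worth walking through is the penalization arithmetic in Throttle.tryAdd: when an accepted FlowFile pushes the aggregate past the configured maximum, the throttle is penalized for the fraction of the time period proportional to the overage. A self-contained sketch with illustrative numbers (not part of the commit):

    public class PenalizationSketch {
        public static void main(String[] args) {
            long maxRate = 1_000_000L;      // "Maximum Rate" of 1 MB per period, in bytes
            long periodMillis = 60_000L;    // "Time Duration" of 1 min
            long transferred = 1_250_000L;  // aggregate after accepting the current FlowFile

            // Mirrors the math in Throttle.tryAdd above.
            long amountOver = transferred - maxRate;               // 250,000 bytes over
            double pct = (double) amountOver / (double) maxRate;   // 0.25
            long penalizationPeriod = (long) (periodMillis * pct); // 15,000 ms

            // The throttle accepts this FlowFile but rejects further work for 15 seconds.
            System.out.printf("penalized for %d of %d ms%n", penalizationPeriod, periodMillis);
        }
    }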


[09/17] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
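
The diffs in this commit only show the annotation being applied processor by processor. The rule the framework enforces, per the commit message, is that a processor becomes invalid when its incoming connections disagree with its declared requirement. A hypothetical sketch of that check follows (illustrative names, not the actual framework code):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

    final class InputRequirementCheck {
        // Returns null when valid, otherwise a human-readable explanation.
        static String validate(Class<?> processorClass, boolean hasIncomingConnection) {
            InputRequirement annotation = processorClass.getAnnotation(InputRequirement.class);
            Requirement requirement = (annotation == null)
                ? Requirement.INPUT_ALLOWED : annotation.value();
            switch (requirement) {
                case INPUT_REQUIRED:
                    return hasIncomingConnection ? null
                        : "Processor requires an incoming connection but has none";
                case INPUT_FORBIDDEN:
                    return hasIncomingConnection
                        ? "Processor does not allow incoming connections but has one" : null;
                case INPUT_ALLOWED:
                default:
                    return null; // valid either way
            }
        }
    }
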
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
index cbcc54d..385ac73 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
@@ -23,7 +23,8 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -57,6 +58,7 @@ import org.apache.nifi.processors.hadoop.util.SequenceFileWriter;
  *
  */
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "sequence file", "create", "sequencefile"})
 @CapabilityDescription("Creates Hadoop Sequence Files from incoming flow files")
 @SeeAlso(PutHDFS.class)

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
index 4a52fb7..aa03e73 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/FetchHDFS.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -44,6 +46,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "hdfs", "get", "ingest", "fetch", "source"})
 @CapabilityDescription("Retrieves a file from HDFS. The content of the incoming FlowFile is replaced by the content of the file in HDFS. "
         + "The file in HDFS is left intact without any changes being made to it.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
index de776d4..4c9deea 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFS.java
@@ -41,6 +41,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -62,6 +64,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_ALLOWED)
 @Tags({"hadoop", "HDFS", "get", "fetch", "ingest", "source", "filesystem"})
 @CapabilityDescription("Fetch files from Hadoop Distributed File System (HDFS) into FlowFiles. This Processor will delete the file from HDFS after fetching it.")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
index 151cbf2..563bda8 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ListHDFS.java
@@ -36,6 +36,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -66,6 +68,7 @@ import org.codehaus.jackson.map.ObjectMapper;
 
 @TriggerSerially
 @TriggerWhenEmpty
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"hadoop", "HDFS", "get", "list", "ingest", "source", "filesystem"})
 @CapabilityDescription("Retrieves a listing of files from HDFS. For each file that is listed in HDFS, creates a FlowFile that represents "
         + "the HDFS file so that it can be fetched in conjunction with ListHDFS. This Processor is designed to run on Primary Node only "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
index 901159b..bedf1b9 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
@@ -33,6 +33,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -59,6 +61,7 @@ import org.apache.nifi.util.StopWatch;
 /**
  * This processor copies FlowFiles to HDFS.
  */
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"hadoop", "HDFS", "put", "copy", "filesystem"})
 @CapabilityDescription("Write FlowFile data to Hadoop Distributed File System (HDFS)")
 @WritesAttribute(attribute = "filename", description = "The name of the file written to HDFS comes from the value of this attribute.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
index 574fb2d..3a6ac79 100644
--- a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
+++ b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
@@ -26,6 +26,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -57,6 +59,7 @@ import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
 
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"HL7", "health level 7", "healthcare", "extract", "attributes"})
 @CapabilityDescription("Extracts information from an HL7 (Health Level 7) formatted FlowFile and adds the information as FlowFile Attributes. "
         + "The attributes are named as <Segment Name> <dot> <Field Index>. If the segment is repeating, the naming will be "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
index 53e7e69..26e8bb6 100644
--- a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
+++ b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/RouteHL7.java
@@ -29,6 +29,8 @@ import java.util.Set;
 import org.apache.nifi.annotation.behavior.DynamicProperties;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -63,6 +65,7 @@ import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"HL7", "healthcare", "route", "Health Level 7"})
 @DynamicProperties({
     @DynamicProperty(name = "Name of a Relationship", value = "An HL7 Query Language query",

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
index 7fe6195..b44eccd 100644
--- a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
+++ b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ExtractImageMetadata.java
@@ -16,11 +16,18 @@
  */
 package org.apache.nifi.processors.image;
 
-import com.drew.imaging.ImageMetadataReader;
-import com.drew.imaging.ImageProcessingException;
-import com.drew.metadata.Directory;
-import com.drew.metadata.Metadata;
-import com.drew.metadata.Tag;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -30,25 +37,22 @@ import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.HashMap;
+import com.drew.imaging.ImageMetadataReader;
+import com.drew.imaging.ImageProcessingException;
+import com.drew.metadata.Directory;
+import com.drew.metadata.Metadata;
+import com.drew.metadata.Tag;
 
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Exif", "Exchangeable", "image", "file", "format", "JPG", "GIF", "PNG", "BMP", "metadata","IPTC", "XMP"})
 @CapabilityDescription("Extract the image metadata from flowfiles containing images. This processor relies on this "
         + "metadata extractor library https://github.com/drewnoakes/metadata-extractor. It extracts a long list of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
index c085b5f..176561f 100644
--- a/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
+++ b/nifi-nar-bundles/nifi-image-bundle/nifi-image-processors/src/main/java/org/apache/nifi/processors/image/ResizeImage.java
@@ -17,7 +17,27 @@
 
 package org.apache.nifi.processors.image;
 
+import java.awt.Graphics2D;
+import java.awt.Image;
+import java.awt.image.BufferedImage;
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import javax.imageio.ImageIO;
+import javax.imageio.ImageReader;
+import javax.imageio.stream.ImageInputStream;
+
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -33,25 +53,9 @@ import org.apache.nifi.processor.io.StreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.StopWatch;
 
-import javax.imageio.ImageIO;
-import javax.imageio.ImageReader;
-import javax.imageio.stream.ImageInputStream;
-import java.awt.Image;
-import java.awt.Graphics2D;
-import java.awt.image.BufferedImage;
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Iterator;
-import java.util.concurrent.TimeUnit;
-
 @EventDriven
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({ "resize", "image", "jpg", "jpeg", "png", "bmp", "wbmp", "gif" })
 @CapabilityDescription("Resizes an image to user-specified dimensions. This Processor uses the image codecs registered with the "
     + "environment that NiFi is running in. By default, this includes JPEG, PNG, BMP, WBMP, and GIF images.")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
index 26590df..e10977b 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/GetKafka.java
@@ -32,18 +32,13 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
-import kafka.consumer.Consumer;
-import kafka.consumer.ConsumerConfig;
-import kafka.consumer.ConsumerIterator;
-import kafka.consumer.KafkaStream;
-import kafka.javaapi.consumer.ConsumerConnector;
-import kafka.message.MessageAndMetadata;
-
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
@@ -58,7 +53,15 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.OutputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import kafka.consumer.Consumer;
+import kafka.consumer.ConsumerConfig;
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+import kafka.javaapi.consumer.ConsumerConnector;
+import kafka.message.MessageAndMetadata;
+
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Fetches messages from Apache Kafka")
 @Tags({"Kafka", "Apache", "Get", "Ingest", "Ingress", "Topic", "PubSub"})
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
index d83c7bf..cff285c 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/processors/kafka/PutKafka.java
@@ -30,10 +30,8 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-import kafka.producer.ProducerConfig;
-
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -58,9 +56,13 @@ import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.stream.io.util.NonThreadSafeCircularBuffer;
 import org.apache.nifi.util.LongHolder;
 
+import kafka.javaapi.producer.Producer;
+import kafka.producer.KeyedMessage;
+import kafka.producer.ProducerConfig;
 import scala.actors.threadpool.Arrays;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({ "Apache", "Kafka", "Put", "Send", "Message", "PubSub" })
 @CapabilityDescription("Sends the contents of a FlowFile as a message to Apache Kafka")
 public class PutKafka extends AbstractProcessor {

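The pair of hunks above shows the two values of the new annotation side by side: GetKafka, a source processor, is marked INPUT_FORBIDDEN so the framework rejects any incoming connection to it, while PutKafka is marked INPUT_REQUIRED so it is only schedulable once an incoming connection is configured. A minimal sketch of a source processor carrying the annotation (the class below is hypothetical and not part of this commit):

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;

// Hypothetical example, not part of this commit: a source processor that
// generates its own FlowFiles, so incoming connections are disallowed.
@InputRequirement(Requirement.INPUT_FORBIDDEN)
public class ExampleSource extends AbstractProcessor {
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // create and transfer FlowFiles here; there is no upstream queue to poll
    }
}
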
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
index 6c20a8f..6f126aa 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
@@ -18,18 +18,20 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
+import static org.apache.nifi.processor.util.StandardValidators.createLongValidator;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
@@ -53,11 +55,13 @@ import org.kitesdk.data.spi.DefaultConfiguration;
 import org.kitesdk.data.spi.filesystem.CSVFileReader;
 import org.kitesdk.data.spi.filesystem.CSVProperties;
 
-import static org.apache.nifi.processor.util.StandardValidators.createLongValidator;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
 
 @Tags({"kite", "csv", "avro"})
-@CapabilityDescription(
-        "Converts CSV files to Avro according to an Avro Schema")
+@InputRequirement(Requirement.INPUT_REQUIRED)
+@CapabilityDescription("Converts CSV files to Avro according to an Avro Schema")
 public class ConvertCSVToAvro extends AbstractKiteProcessor {
 
     private static final CSVProperties DEFAULTS = new CSVProperties.Builder().build();

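Most of the remaining churn in this and the following Kite hunks is a consistent re-grouping of imports rather than a behavior change. The ordering the hunks converge on is: static imports first, then java.*, then org.*, then com.*, with a blank line between groups, for example (all lines taken from the hunk above):

import static org.apache.nifi.processor.util.StandardValidators.createLongValidator;

import java.io.IOException;
import java.io.InputStream;

import org.apache.avro.Schema;
import org.apache.nifi.annotation.behavior.InputRequirement;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
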
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
index ec1503c..af120bf 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertJSONToAvro.java
@@ -18,18 +18,18 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -47,9 +47,13 @@ import org.kitesdk.data.SchemaNotFoundException;
 import org.kitesdk.data.spi.DefaultConfiguration;
 import org.kitesdk.data.spi.filesystem.JSONFileReader;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 @Tags({"kite", "json", "avro"})
-@CapabilityDescription(
-        "Converts JSON files to Avro according to an Avro Schema")
+@InputRequirement(Requirement.INPUT_REQUIRED)
+@CapabilityDescription("Converts JSON files to Avro according to an Avro Schema")
 public class ConvertJSONToAvro extends AbstractKiteProcessor {
 
     private static final Relationship SUCCESS = new Relationship.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
index 7a30db1..1986f0b 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/StoreInKiteDataset.java
@@ -18,16 +18,17 @@
  */
 package org.apache.nifi.processors.kite;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+
 import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileStream;
 import org.apache.avro.generic.GenericData.Record;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -46,6 +47,10 @@ import org.kitesdk.data.ValidationException;
 import org.kitesdk.data.View;
 import org.kitesdk.data.spi.SchemaValidationUtil;
 
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"kite", "avro", "parquet", "hadoop", "hive", "hdfs", "hbase"})
 @CapabilityDescription("Stores Avro records in a Kite dataset")
 public class StoreInKiteDataset extends AbstractKiteProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java b/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
index 8398152..5f58781 100644
--- a/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
+++ b/nifi-nar-bundles/nifi-language-translation-bundle/nifi-yandex-processors/src/main/java/org/apache/nifi/processors/yandex/YandexTranslate.java
@@ -33,6 +33,8 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.MultivaluedMap;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -70,6 +72,7 @@ import com.sun.jersey.api.json.JSONConfiguration;
 import com.sun.jersey.core.util.MultivaluedMapImpl;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"yandex", "translate", "translation", "language"})
 @CapabilityDescription("Translates content and attributes from one language to another")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore b/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
new file mode 100644
index 0000000..ae3c172
--- /dev/null
+++ b/nifi-nar-bundles/nifi-pcap-bundle/nifi-pcap-processors/.gitignore
@@ -0,0 +1 @@
+/bin/

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
index a78b112..e41b583 100644
--- a/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
+++ b/nifi-nar-bundles/nifi-social-media-bundle/nifi-twitter-processors/src/main/java/org/apache/nifi/processors/twitter/GetTwitter.java
@@ -32,6 +32,8 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -57,8 +59,8 @@ import org.apache.nifi.processor.util.StandardValidators;
 import com.twitter.hbc.ClientBuilder;
 import com.twitter.hbc.core.Client;
 import com.twitter.hbc.core.Constants;
-import com.twitter.hbc.core.endpoint.Location.Coordinate ;
 import com.twitter.hbc.core.endpoint.Location ;
+import com.twitter.hbc.core.endpoint.Location.Coordinate ;
 import com.twitter.hbc.core.endpoint.StatusesFilterEndpoint;
 import com.twitter.hbc.core.endpoint.StatusesFirehoseEndpoint;
 import com.twitter.hbc.core.endpoint.StatusesSampleEndpoint;
@@ -69,6 +71,7 @@ import com.twitter.hbc.httpclient.auth.Authentication;
 import com.twitter.hbc.httpclient.auth.OAuth1;
 
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"twitter", "tweets", "social media", "status", "json"})
 @CapabilityDescription("Pulls status changes from Twitter's streaming API")
 @WritesAttribute(attribute = "mime.type", description = "Sets mime type to application/json")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
index ff264a1..a85aa0f 100644
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
@@ -18,7 +18,29 @@
  */
 package org.apache.nifi.processors.solr;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
 import org.apache.commons.io.IOUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnRemoved;
@@ -41,27 +63,8 @@ import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Properties;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-
 @Tags({"Apache", "Solr", "Get", "Pull"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Queries Solr and outputs the results as a FlowFile")
 public class GetSolr extends SolrProcessor {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
index 560ad34..df034c9 100644
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/PutSolrContentStream.java
@@ -18,7 +18,24 @@
  */
 package org.apache.nifi.processors.solr;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -40,22 +57,8 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.MultiMapSolrParams;
 import org.apache.solr.common.util.ContentStreamBase;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.concurrent.TimeUnit;
-
 @Tags({"Apache", "Solr", "Put", "Send"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Sends the contents of a FlowFile as a ContentStream to Solr")
 @DynamicProperty(name="A Solr request parameter name", value="A Solr request parameter value",
         description="These parameters will be passed to Solr on the request")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
index 9887e38..816b407 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Base64EncodeContent.java
@@ -29,6 +29,8 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.codec.binary.Base64InputStream;
 import org.apache.commons.codec.binary.Base64OutputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -51,101 +53,102 @@ import org.apache.nifi.util.StopWatch;
 @SupportsBatching
 @Tags({"encode", "base64"})
 @CapabilityDescription("Encodes or decodes content to and from base64")
+@InputRequirement(Requirement.INPUT_REQUIRED)
 public class Base64EncodeContent extends AbstractProcessor {
 
-    public static final String ENCODE_MODE = "Encode";
-    public static final String DECODE_MODE = "Decode";
+	public static final String ENCODE_MODE = "Encode";
+	public static final String DECODE_MODE = "Decode";
 
-    public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
-            .name("Mode")
-            .description("Specifies whether the content should be encoded or decoded")
-            .required(true)
-            .allowableValues(ENCODE_MODE, DECODE_MODE)
-            .defaultValue(ENCODE_MODE)
-            .build();
-    public static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("Any FlowFile that is successfully encoded or decoded will be routed to success")
-            .build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder()
-            .name("failure")
-            .description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
-            .build();
+	public static final PropertyDescriptor MODE = new PropertyDescriptor.Builder()
+		.name("Mode")
+		.description("Specifies whether the content should be encoded or decoded")
+		.required(true)
+		.allowableValues(ENCODE_MODE, DECODE_MODE)
+		.defaultValue(ENCODE_MODE)
+		.build();
+	public static final Relationship REL_SUCCESS = new Relationship.Builder()
+		.name("success")
+		.description("Any FlowFile that is successfully encoded or decoded will be routed to success")
+		.build();
+	public static final Relationship REL_FAILURE = new Relationship.Builder()
+		.name("failure")
+		.description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
+		.build();
 
-    private List<PropertyDescriptor> properties;
-    private Set<Relationship> relationships;
+	private List<PropertyDescriptor> properties;
+	private Set<Relationship> relationships;
 
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(MODE);
-        this.properties = Collections.unmodifiableList(properties);
+	@Override
+	protected void init(final ProcessorInitializationContext context) {
+		final List<PropertyDescriptor> properties = new ArrayList<>();
+		properties.add(MODE);
+		this.properties = Collections.unmodifiableList(properties);
 
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_FAILURE);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
+		final Set<Relationship> relationships = new HashSet<>();
+		relationships.add(REL_SUCCESS);
+		relationships.add(REL_FAILURE);
+		this.relationships = Collections.unmodifiableSet(relationships);
+	}
 
-    @Override
-    public Set<Relationship> getRelationships() {
-        return relationships;
-    }
+	@Override
+	public Set<Relationship> getRelationships() {
+		return relationships;
+	}
 
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
-    }
+	@Override
+	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+		return properties;
+	}
 
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSession session) {
-        FlowFile flowFile = session.get();
-        if (flowFile == null) {
-            return;
-        }
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSession session) {
+		FlowFile flowFile = session.get();
+		if (flowFile == null) {
+			return;
+		}
 
-        final ProcessorLog logger = getLogger();
+		final ProcessorLog logger = getLogger();
 
-        boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
-        try {
-            final StopWatch stopWatch = new StopWatch(true);
-            if (encode) {
-                flowFile = session.write(flowFile, new StreamCallback() {
-                    @Override
-                    public void process(InputStream in, OutputStream out) throws IOException {
-                        try (Base64OutputStream bos = new Base64OutputStream(out)) {
-                            int len = -1;
-                            byte[] buf = new byte[8192];
-                            while ((len = in.read(buf)) > 0) {
-                                bos.write(buf, 0, len);
-                            }
-                            bos.flush();
-                        }
-                    }
-                });
-            } else {
-                flowFile = session.write(flowFile, new StreamCallback() {
-                    @Override
-                    public void process(InputStream in, OutputStream out) throws IOException {
-                        try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
-                            int len = -1;
-                            byte[] buf = new byte[8192];
-                            while ((len = bis.read(buf)) > 0) {
-                                out.write(buf, 0, len);
-                            }
-                            out.flush();
-                        }
-                    }
-                });
-            }
+		boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
+		try {
+			final StopWatch stopWatch = new StopWatch(true);
+			if (encode) {
+				flowFile = session.write(flowFile, new StreamCallback() {
+					@Override
+					public void process(InputStream in, OutputStream out) throws IOException {
+						try (Base64OutputStream bos = new Base64OutputStream(out)) {
+							int len = -1;
+							byte[] buf = new byte[8192];
+							while ((len = in.read(buf)) > 0) {
+								bos.write(buf, 0, len);
+							}
+							bos.flush();
+						}
+					}
+				});
+			} else {
+				flowFile = session.write(flowFile, new StreamCallback() {
+					@Override
+					public void process(InputStream in, OutputStream out) throws IOException {
+						try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
+							int len = -1;
+							byte[] buf = new byte[8192];
+							while ((len = bis.read(buf)) > 0) {
+								out.write(buf, 0, len);
+							}
+							out.flush();
+						}
+					}
+				});
+			}
 
-            logger.info("Successfully {} {}", new Object[]{encode ? "encoded" : "decoded", flowFile});
-            session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
-            session.transfer(flowFile, REL_SUCCESS);
-        } catch (ProcessException e) {
-            logger.error("Failed to {} {} due to {}", new Object[]{encode ? "encode" : "decode", flowFile, e});
-            session.transfer(flowFile, REL_FAILURE);
-        }
-    }
+			logger.info("Successfully {} {}", new Object[]{encode ? "encoded" : "decoded", flowFile});
+			session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
+			session.transfer(flowFile, REL_SUCCESS);
+		} catch (ProcessException e) {
+			logger.error("Failed to {} {} due to {}", new Object[]{encode ? "encode" : "decode", flowFile, e});
+			session.transfer(flowFile, REL_FAILURE);
+		}
+	}
 
 }

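Even with INPUT_REQUIRED on Base64EncodeContent, the null check on session.get() above is still needed: the annotation guarantees that an incoming connection is configured, not that a FlowFile is queued at the instant the processor triggers. A minimal sketch of that pairing (hypothetical class and relationship, not taken from this commit):

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;

@InputRequirement(Requirement.INPUT_REQUIRED)
public class ExamplePassThrough extends AbstractProcessor {

    static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .build();

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return; // the connection exists, but its queue may be empty right now
        }
        session.transfer(flowFile, REL_SUCCESS);
    }
}
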
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
index 1b9b20c..593cf44 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java
@@ -29,20 +29,18 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
-import lzma.sdk.lzma.Decoder;
-import lzma.streams.LzmaInputStream;
-import lzma.streams.LzmaOutputStream;
-
 import org.apache.commons.compress.compressors.CompressorStreamFactory;
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
 import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
@@ -63,9 +61,14 @@ import org.tukaani.xz.LZMA2Options;
 import org.tukaani.xz.XZInputStream;
 import org.tukaani.xz.XZOutputStream;
 
+import lzma.sdk.lzma.Decoder;
+import lzma.streams.LzmaInputStream;
+import lzma.streams.LzmaOutputStream;
+
 @EventDriven
 @SideEffectFree
 @SupportsBatching
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"content", "compress", "decompress", "gzip", "bzip2", "lzma", "xz-lzma2"})
 @CapabilityDescription("Compresses or decompresses the contents of FlowFiles using a user-specified compression algorithm and updates the mime.type "
     + "attribute as appropriate")

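CompressContent delegates the codec work to Commons Compress and the LZMA/XZ libraries whose imports are rearranged in this hunk. As a rough sketch of the gzip path, using only the CompressorStreamFactory API visible in those imports (buffering and error handling simplified, and not lifted from the processor itself):

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorOutputStream;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public final class GzipSketch {

    // Copies 'in' to 'out', gzip-compressing the bytes on the way through.
    static void gzip(final InputStream in, final OutputStream out) throws IOException, CompressorException {
        final CompressorOutputStream cos = new CompressorStreamFactory()
                .createCompressorOutputStream(CompressorStreamFactory.GZIP, out);
        final byte[] buf = new byte[8192];
        int len;
        while ((len = in.read(buf)) > 0) {
            cos.write(buf, 0, len);
        }
        cos.close(); // flushes buffered data and writes the gzip trailer
    }
}
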
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
index 2efc852..a45c211 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java
@@ -31,6 +31,12 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Pattern;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.TriggerSerially;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -43,10 +49,6 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.util.timebuffer.EntityAccess;
@@ -54,344 +56,345 @@ import org.apache.nifi.util.timebuffer.TimedBuffer;
 
 @SideEffectFree
 @TriggerSerially
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"rate control", "throttle", "rate", "throughput"})
 @CapabilityDescription("Controls the rate at which data is transferred to follow-on processors.")
 public class ControlRate extends AbstractProcessor {
 
-    public static final String DATA_RATE = "data rate";
-    public static final String FLOWFILE_RATE = "flowfile count";
-    public static final String ATTRIBUTE_RATE = "attribute value";
-
-    public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
-            .name("Rate Control Criteria")
-            .description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
-            .required(true)
-            .allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
-            .defaultValue(DATA_RATE)
-            .build();
-    public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
-            .name("Maximum Rate")
-            .description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
-                    + "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
-            .required(true)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
-            .build();
-    public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-            .name("Rate Controlled Attribute")
-            .description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
-                    + "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
-                    + "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
-            .required(false)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .expressionLanguageSupported(false)
-            .build();
-    public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
-            .name("Time Duration")
-            .description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
-            .required(true)
-            .addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
-            .defaultValue("1 min")
-            .build();
-    public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
-            .name("Grouping Attribute")
-            .description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
-                    + "each value specified by the attribute with this name. Changing this value resets the rate counters.")
-            .required(false)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .expressionLanguageSupported(false)
-            .build();
-
-    public static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("All FlowFiles are transferred to this relationship")
-            .build();
-    public static final Relationship REL_FAILURE = new Relationship.Builder()
-            .name("failure")
-            .description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
-            .build();
-
-    private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
-    private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
-
-    private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
-    private List<PropertyDescriptor> properties;
-    private Set<Relationship> relationships;
-    private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
-
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(RATE_CONTROL_CRITERIA);
-        properties.add(MAX_RATE);
-        properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
-        properties.add(TIME_PERIOD);
-        properties.add(GROUPING_ATTRIBUTE_NAME);
-        this.properties = Collections.unmodifiableList(properties);
-
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
-    }
-
-    @Override
-    public Set<Relationship> getRelationships() {
-        return relationships;
-    }
-
-    @Override
-    protected Collection<ValidationResult> customValidate(final ValidationContext context) {
-        final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
-
-        final Validator rateValidator;
-        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-            case DATA_RATE:
-                rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
-                break;
-            case ATTRIBUTE_RATE:
-                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-                final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-                if (rateAttr == null) {
-                    validationResults.add(new ValidationResult.Builder()
-                            .subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
-                            .explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
-                            .build());
-                }
-                break;
-            case FLOWFILE_RATE:
-            default:
-                rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
-                break;
-        }
-
-        final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
-        if (!rateResult.isValid()) {
-            validationResults.add(rateResult);
-        }
-
-        return validationResults;
-    }
-
-    @Override
-    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
-        super.onPropertyModified(descriptor, oldValue, newValue);
-
-        if (descriptor.equals(RATE_CONTROL_CRITERIA)
-                || descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
-                || descriptor.equals(GROUPING_ATTRIBUTE_NAME)
-                || descriptor.equals(TIME_PERIOD)) {
-            // if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
-            throttleMap.clear();
-        } else if (descriptor.equals(MAX_RATE)) {
-            final long newRate;
-            if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
-                newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
-            } else {
-                newRate = Long.parseLong(newValue);
-            }
-
-            for (final Throttle throttle : throttleMap.values()) {
-                throttle.setMaxRate(newRate);
-            }
-        }
-    }
-
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
-        final long lastClearTime = lastThrottleClearTime.get();
-        final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
-        if (lastClearTime < throttleExpirationMillis) {
-            if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
-                final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
-                while (itr.hasNext()) {
-                    final Map.Entry<String, Throttle> entry = itr.next();
-                    final Throttle throttle = entry.getValue();
-                    if (throttle.tryLock()) {
-                        try {
-                            if (throttle.lastUpdateTime() < lastClearTime) {
-                                itr.remove();
-                            }
-                        } finally {
-                            throttle.unlock();
-                        }
-                    }
-                }
-            }
-        }
-
-        // TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
-        FlowFile flowFile = session.get();
-        if (flowFile == null) {
-            return;
-        }
-
-        final ProcessorLog logger = getLogger();
-        final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
-        final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
-        long rateValue;
-        switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
-            case DATA_RATE:
-                rateValue = flowFile.getSize();
-                break;
-            case FLOWFILE_RATE:
-                rateValue = 1;
-                break;
-            case ATTRIBUTE_RATE:
-                final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
-                if (attributeValue == null) {
-                    logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[]{flowFile, rateControlAttributeName});
-                    session.transfer(flowFile, REL_FAILURE);
-                    return;
-                }
-
-                if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
-                    logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
-                            new Object[]{flowFile, rateControlAttributeName, attributeValue});
-                    session.transfer(flowFile, REL_FAILURE);
-                    return;
-                }
-                rateValue = Long.parseLong(attributeValue);
-                break;
-            default:
-                throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
-        }
-
-        final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
-        final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
-        Throttle throttle = throttleMap.get(groupName);
-        if (throttle == null) {
-            throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
-
-            final String maxRateValue = context.getProperty(MAX_RATE).getValue();
-            final long newRate;
-            if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
-                newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
-            } else {
-                newRate = Long.parseLong(maxRateValue);
-            }
-            throttle.setMaxRate(newRate);
-
-            throttleMap.put(groupName, throttle);
-        }
-
-        throttle.lock();
-        try {
-            if (throttle.tryAdd(rateValue)) {
-                logger.info("transferring {} to 'success'", new Object[]{flowFile});
-                session.transfer(flowFile, REL_SUCCESS);
-            } else {
-                flowFile = session.penalize(flowFile);
-                session.transfer(flowFile);
-            }
-        } finally {
-            throttle.unlock();
-        }
-    }
-
-    private static class TimestampedLong {
-
-        private final Long value;
-        private final long timestamp = System.currentTimeMillis();
-
-        public TimestampedLong(final Long value) {
-            this.value = value;
-        }
-
-        public Long getValue() {
-            return value;
-        }
-
-        public long getTimestamp() {
-            return timestamp;
-        }
-    }
-
-    private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
-
-        @Override
-        public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
-            if (oldValue == null && toAdd == null) {
-                return new TimestampedLong(0L);
-            } else if (oldValue == null) {
-                return toAdd;
-            } else if (toAdd == null) {
-                return oldValue;
-            }
-
-            return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
-        }
-
-        @Override
-        public TimestampedLong createNew() {
-            return new TimestampedLong(0L);
-        }
-
-        @Override
-        public long getTimestamp(TimestampedLong entity) {
-            return entity == null ? 0L : entity.getTimestamp();
-        }
-    }
-
-    private static class Throttle extends ReentrantLock {
-
-        private final AtomicLong maxRate = new AtomicLong(1L);
-        private final long timePeriodValue;
-        private final TimeUnit timePeriodUnit;
-        private final TimedBuffer<TimestampedLong> timedBuffer;
-        private final ProcessorLog logger;
-
-        private volatile long penalizationExpired;
-        private volatile long lastUpdateTime;
-
-        public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
-            this.timePeriodUnit = unit;
-            this.timePeriodValue = timePeriod;
-            this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
-            this.logger = logger;
-        }
-
-        public void setMaxRate(final long maxRate) {
-            this.maxRate.set(maxRate);
-        }
-
-        public long lastUpdateTime() {
-            return lastUpdateTime;
-        }
-
-        public boolean tryAdd(final long value) {
-            final long now = System.currentTimeMillis();
-            if (penalizationExpired > now) {
-                return false;
-            }
-
-            final long maxRateValue = maxRate.get();
-
-            final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
-            if (sum != null && sum.getValue() >= maxRateValue) {
-                logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[]{sum.getValue(), value});
-                return false;
-            }
-
-            logger.debug("current sum for throttle is {}, so allowing rate of {} through",
-                    new Object[]{sum == null ? 0 : sum.getValue(), value});
-
-            final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
-            if (transferred > maxRateValue) {
-                final long amountOver = transferred - maxRateValue;
-                // determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
-                final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
-                final double pct = (double) amountOver / (double) maxRateValue;
-                final long penalizationPeriod = (long) (milliDuration * pct);
-                this.penalizationExpired = now + penalizationPeriod;
-                logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[]{value, penalizationPeriod});
-            }
-
-            lastUpdateTime = now;
-            return true;
-        }
-    }
+	public static final String DATA_RATE = "data rate";
+	public static final String FLOWFILE_RATE = "flowfile count";
+	public static final String ATTRIBUTE_RATE = "attribute value";
+
+	public static final PropertyDescriptor RATE_CONTROL_CRITERIA = new PropertyDescriptor.Builder()
+		.name("Rate Control Criteria")
+		.description("Indicates the criteria that is used to control the throughput rate. Changing this value resets the rate counters.")
+		.required(true)
+		.allowableValues(DATA_RATE, FLOWFILE_RATE, ATTRIBUTE_RATE)
+		.defaultValue(DATA_RATE)
+		.build();
+	public static final PropertyDescriptor MAX_RATE = new PropertyDescriptor.Builder()
+		.name("Maximum Rate")
+		.description("The maximum rate at which data should pass through this processor. The format of this property is expected to be a "
+			+ "positive integer, or a Data Size (such as '1 MB') if Rate Control Criteria is set to 'data rate'.")
+		.required(true)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR) // validated in customValidate b/c dependent on Rate Control Criteria
+		.build();
+	public static final PropertyDescriptor RATE_CONTROL_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+		.name("Rate Controlled Attribute")
+		.description("The name of an attribute whose values build toward the rate limit if Rate Control Criteria is set to 'attribute value'. "
+			+ "The value of the attribute referenced by this property must be a positive long, or the FlowFile will be routed to failure. "
+			+ "This value is ignored if Rate Control Criteria is not set to 'attribute value'. Changing this value resets the rate counters.")
+		.required(false)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+		.expressionLanguageSupported(false)
+		.build();
+	public static final PropertyDescriptor TIME_PERIOD = new PropertyDescriptor.Builder()
+		.name("Time Duration")
+		.description("The amount of time to which the Maximum Data Size and Maximum Number of Files pertains. Changing this value resets the rate counters.")
+		.required(true)
+		.addValidator(StandardValidators.createTimePeriodValidator(1, TimeUnit.SECONDS, Integer.MAX_VALUE, TimeUnit.SECONDS))
+		.defaultValue("1 min")
+		.build();
+	public static final PropertyDescriptor GROUPING_ATTRIBUTE_NAME = new PropertyDescriptor.Builder()
+		.name("Grouping Attribute")
+		.description("By default, a single \"throttle\" is used for all FlowFiles. If this value is specified, a separate throttle is used for "
+			+ "each value specified by the attribute with this name. Changing this value resets the rate counters.")
+		.required(false)
+		.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+		.expressionLanguageSupported(false)
+		.build();
+
+	public static final Relationship REL_SUCCESS = new Relationship.Builder()
+		.name("success")
+		.description("All FlowFiles are transferred to this relationship")
+		.build();
+	public static final Relationship REL_FAILURE = new Relationship.Builder()
+		.name("failure")
+		.description("FlowFiles will be routed to this relationship if they are missing a necessary attribute or the attribute is not in the expected format")
+		.build();
+
+	private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
+	private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";
+
+	private final ConcurrentMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
+	private List<PropertyDescriptor> properties;
+	private Set<Relationship> relationships;
+	private final AtomicLong lastThrottleClearTime = new AtomicLong(System.currentTimeMillis());
+
+	@Override
+	protected void init(final ProcessorInitializationContext context) {
+		final List<PropertyDescriptor> properties = new ArrayList<>();
+		properties.add(RATE_CONTROL_CRITERIA);
+		properties.add(MAX_RATE);
+		properties.add(RATE_CONTROL_ATTRIBUTE_NAME);
+		properties.add(TIME_PERIOD);
+		properties.add(GROUPING_ATTRIBUTE_NAME);
+		this.properties = Collections.unmodifiableList(properties);
+
+		final Set<Relationship> relationships = new HashSet<>();
+		relationships.add(REL_SUCCESS);
+		this.relationships = Collections.unmodifiableSet(relationships);
+	}
+
+	@Override
+	protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+		return properties;
+	}
+
+	@Override
+	public Set<Relationship> getRelationships() {
+		return relationships;
+	}
+
+	@Override
+	protected Collection<ValidationResult> customValidate(final ValidationContext context) {
+		final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(context));
+
+		final Validator rateValidator;
+		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+			case DATA_RATE:
+				rateValidator = StandardValidators.DATA_SIZE_VALIDATOR;
+				break;
+			case ATTRIBUTE_RATE:
+				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+				final String rateAttr = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+				if (rateAttr == null) {
+					validationResults.add(new ValidationResult.Builder()
+						.subject(RATE_CONTROL_ATTRIBUTE_NAME.getName())
+						.explanation("<Rate Controlled Attribute> property must be set if using <Rate Control Criteria> of 'attribute value'")
+						.build());
+				}
+				break;
+			case FLOWFILE_RATE:
+			default:
+				rateValidator = StandardValidators.POSITIVE_LONG_VALIDATOR;
+				break;
+		}
+
+		final ValidationResult rateResult = rateValidator.validate("Maximum Rate", context.getProperty(MAX_RATE).getValue(), context);
+		if (!rateResult.isValid()) {
+			validationResults.add(rateResult);
+		}
+
+		return validationResults;
+	}
+
+	@Override
+	public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
+		super.onPropertyModified(descriptor, oldValue, newValue);
+
+		if (descriptor.equals(RATE_CONTROL_CRITERIA)
+			|| descriptor.equals(RATE_CONTROL_ATTRIBUTE_NAME)
+			|| descriptor.equals(GROUPING_ATTRIBUTE_NAME)
+			|| descriptor.equals(TIME_PERIOD)) {
+			// if the criteria that is being used to determine limits/throttles is changed, we must clear our throttle map.
+			throttleMap.clear();
+		} else if (descriptor.equals(MAX_RATE)) {
+			final long newRate;
+			if (DataUnit.DATA_SIZE_PATTERN.matcher(newValue).matches()) {
+				newRate = DataUnit.parseDataSize(newValue, DataUnit.B).longValue();
+			} else {
+				newRate = Long.parseLong(newValue);
+			}
+
+			for (final Throttle throttle : throttleMap.values()) {
+				throttle.setMaxRate(newRate);
+			}
+		}
+	}
+
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
+		final long lastClearTime = lastThrottleClearTime.get();
+		final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
+		if (lastClearTime < throttleExpirationMillis) {
+			if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
+				final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
+				while (itr.hasNext()) {
+					final Map.Entry<String, Throttle> entry = itr.next();
+					final Throttle throttle = entry.getValue();
+					if (throttle.tryLock()) {
+						try {
+							if (throttle.lastUpdateTime() < lastClearTime) {
+								itr.remove();
+							}
+						} finally {
+							throttle.unlock();
+						}
+					}
+				}
+			}
+		}
+
+		// TODO: Should periodically clear any Throttle that has not been used in more than 2 throttling periods
+		FlowFile flowFile = session.get();
+		if (flowFile == null) {
+			return;
+		}
+
+		final ProcessorLog logger = getLogger();
+		final long seconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS);
+		final String rateControlAttributeName = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
+		long rateValue;
+		switch (context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase()) {
+			case DATA_RATE:
+				rateValue = flowFile.getSize();
+				break;
+			case FLOWFILE_RATE:
+				rateValue = 1;
+				break;
+			case ATTRIBUTE_RATE:
+				final String attributeValue = flowFile.getAttribute(rateControlAttributeName);
+				if (attributeValue == null) {
+					logger.error("routing {} to 'failure' because FlowFile is missing required attribute {}", new Object[]{flowFile, rateControlAttributeName});
+					session.transfer(flowFile, REL_FAILURE);
+					return;
+				}
+
+				if (!POSITIVE_LONG_PATTERN.matcher(attributeValue).matches()) {
+					logger.error("routing {} to 'failure' because FlowFile attribute {} has a value of {}, which is not a positive long",
+						new Object[]{flowFile, rateControlAttributeName, attributeValue});
+					session.transfer(flowFile, REL_FAILURE);
+					return;
+				}
+				rateValue = Long.parseLong(attributeValue);
+				break;
+			default:
+				throw new AssertionError("<Rate Control Criteria> property set to illegal value of " + context.getProperty(RATE_CONTROL_CRITERIA).getValue());
+		}
+
+		final String groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
+		final String groupName = (groupingAttributeName == null) ? DEFAULT_GROUP_ATTRIBUTE : flowFile.getAttribute(groupingAttributeName);
+		Throttle throttle = throttleMap.get(groupName);
+		if (throttle == null) {
+			throttle = new Throttle((int) seconds, TimeUnit.SECONDS, logger);
+
+			final String maxRateValue = context.getProperty(MAX_RATE).getValue();
+			final long newRate;
+			if (DataUnit.DATA_SIZE_PATTERN.matcher(maxRateValue).matches()) {
+				newRate = DataUnit.parseDataSize(maxRateValue, DataUnit.B).longValue();
+			} else {
+				newRate = Long.parseLong(maxRateValue);
+			}
+			throttle.setMaxRate(newRate);
+
+			throttleMap.put(groupName, throttle);
+		}
+
+		throttle.lock();
+		try {
+			if (throttle.tryAdd(rateValue)) {
+				logger.info("transferring {} to 'success'", new Object[]{flowFile});
+				session.transfer(flowFile, REL_SUCCESS);
+			} else {
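+				// Over the configured rate: penalize the FlowFile and transfer it with no
+				// relationship, which returns it to its incoming queue for a later retry.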
+				flowFile = session.penalize(flowFile);
+				session.transfer(flowFile);
+			}
+		} finally {
+			throttle.unlock();
+		}
+	}
+
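+	// Couples an accumulated value with the wall-clock time at which it was recorded so
+	// that the TimedBuffer can age entries out of the rolling throttling window.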
+	private static class TimestampedLong {
+
+		private final Long value;
+		private final long timestamp = System.currentTimeMillis();
+
+		public TimestampedLong(final Long value) {
+			this.value = value;
+		}
+
+		public Long getValue() {
+			return value;
+		}
+
+		public long getTimestamp() {
+			return timestamp;
+		}
+	}
+
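+	// Tells the TimedBuffer how to create, combine, and timestamp TimestampedLong entries
+	// when it computes the rolling sum for the throttling window.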
+	private static class RateEntityAccess implements EntityAccess<TimestampedLong> {
+
+		@Override
+		public TimestampedLong aggregate(TimestampedLong oldValue, TimestampedLong toAdd) {
+			if (oldValue == null && toAdd == null) {
+				return new TimestampedLong(0L);
+			} else if (oldValue == null) {
+				return toAdd;
+			} else if (toAdd == null) {
+				return oldValue;
+			}
+
+			return new TimestampedLong(oldValue.getValue() + toAdd.getValue());
+		}
+
+		@Override
+		public TimestampedLong createNew() {
+			return new TimestampedLong(0L);
+		}
+
+		@Override
+		public long getTimestamp(TimestampedLong entity) {
+			return entity == null ? 0L : entity.getTimestamp();
+		}
+	}
+
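+	// Tracks the rolling transfer total for a single rate group. Extending ReentrantLock
+	// lets callers guard tryAdd() and map cleanup with the Throttle instance itself.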
+	private static class Throttle extends ReentrantLock {
+
+		private final AtomicLong maxRate = new AtomicLong(1L);
+		private final long timePeriodValue;
+		private final TimeUnit timePeriodUnit;
+		private final TimedBuffer<TimestampedLong> timedBuffer;
+		private final ProcessorLog logger;
+
+		private volatile long penalizationExpired;
+		private volatile long lastUpdateTime;
+
+		public Throttle(final int timePeriod, final TimeUnit unit, final ProcessorLog logger) {
+			this.timePeriodUnit = unit;
+			this.timePeriodValue = timePeriod;
+			this.timedBuffer = new TimedBuffer<>(unit, timePeriod, new RateEntityAccess());
+			this.logger = logger;
+		}
+
+		public void setMaxRate(final long maxRate) {
+			this.maxRate.set(maxRate);
+		}
+
+		public long lastUpdateTime() {
+			return lastUpdateTime;
+		}
+
+		public boolean tryAdd(final long value) {
+			final long now = System.currentTimeMillis();
+			if (penalizationExpired > now) {
+				return false;
+			}
+
+			final long maxRateValue = maxRate.get();
+
+			final TimestampedLong sum = timedBuffer.getAggregateValue(TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit));
+			if (sum != null && sum.getValue() >= maxRateValue) {
+				logger.debug("current sum for throttle is {}, so not allowing rate of {} through", new Object[]{sum.getValue(), value});
+				return false;
+			}
+
+			logger.debug("current sum for throttle is {}, so allowing rate of {} through",
+				new Object[]{sum == null ? 0 : sum.getValue(), value});
+
+			final long transferred = timedBuffer.add(new TimestampedLong(value)).getValue();
+			if (transferred > maxRateValue) {
+				final long amountOver = transferred - maxRateValue;
+				// determine how long it should take to transfer 'amountOver' and 'penalize' the Throttle for that long
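+				// e.g., with a max rate of 1 MB over a 1-minute period, a running total of
+				// 1.5 MB overshoots by 0.5 MB (pct = 0.5), so the Throttle is penalized for
+				// 0.5 * 60,000 ms = 30 seconds.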
+				final long milliDuration = TimeUnit.MILLISECONDS.convert(timePeriodValue, timePeriodUnit);
+				final double pct = (double) amountOver / (double) maxRateValue;
+				final long penalizationPeriod = (long) (milliDuration * pct);
+				this.penalizationExpired = now + penalizationPeriod;
+				logger.debug("allowing rate of {} through but penalizing Throttle for {} milliseconds", new Object[]{value, penalizationPeriod});
+			}
+
+			lastUpdateTime = now;
+			return true;
+		}
+	}
 }

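For reference, a minimal sketch of exercising the throttle above with the nifi-mock TestRunner, assuming the class shown is the standard ControlRate processor; the test-class name, payload sizes, and iteration count are illustrative only:

    import org.apache.nifi.processors.standard.ControlRate;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class ControlRateSketch {

        @Test
        public void testDataRateThrottling() {
            final TestRunner runner = TestRunners.newTestRunner(ControlRate.class);
            runner.setProperty(ControlRate.RATE_CONTROL_CRITERIA, ControlRate.DATA_RATE);
            runner.setProperty(ControlRate.MAX_RATE, "1 MB");
            runner.setProperty(ControlRate.TIME_PERIOD, "1 min");

            runner.enqueue(new byte[700000]);
            runner.enqueue(new byte[700000]);
            runner.enqueue(new byte[700000]);
            runner.run(3);

            // The first FlowFile fits under the limit. The second is still allowed because
            // the rolling sum was below 1 MB when it arrived, but it pushes the total over
            // and penalizes the Throttle, so the third is held and returned to its queue.
            runner.assertTransferCount(ControlRate.REL_SUCCESS, 2);
            runner.assertQueueNotEmpty();
        }
    }
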
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
index a0a1364..7a99a59 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
@@ -33,8 +33,10 @@ import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.StreamCallback;
@@ -76,6 +78,7 @@ import java.util.concurrent.TimeUnit;
  */
 @EventDriven
 @SideEffectFree
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @SupportsBatching
 @Tags({"text", "convert", "characterset", "character set"})
 @CapabilityDescription("Converts a FlowFile's content from one character set to another")

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
index 7eda593..9591960 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
@@ -34,10 +34,12 @@ import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -64,6 +66,7 @@ import org.codehaus.jackson.node.JsonNodeFactory;
 @SideEffectFree
 @SupportsBatching
 @SeeAlso(PutSQL.class)
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"json", "sql", "database", "rdbms", "insert", "update", "relational", "flat"})
 @CapabilityDescription("Converts a JSON-formatted FlowFile into an UPDATE or INSERT SQL statement. The incoming FlowFile is expected to be "
         + "\"flat\" JSON message, meaning that it consists of a single JSON element and each field maps to a simple type. If a field maps to "


[10/17] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
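
The one-line opt-in shown in the diffs above applies to any processor. A minimal hypothetical example (class name and body invented for illustration) that the framework will consider invalid until it has at least one incoming connection:

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.exception.ProcessException;

    // Declaring INPUT_REQUIRED tells the framework that this processor cannot be
    // valid unless at least one incoming connection is present.
    @InputRequirement(Requirement.INPUT_REQUIRED)
    public class MyExampleProcessor extends AbstractProcessor {

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            // no-op; shown only to illustrate the annotation
        }
    }
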
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index cbd0f88..0c39eda 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -16,14 +16,6 @@
  */
 package org.apache.nifi.controller;
 
-import org.apache.nifi.annotation.behavior.EventDriven;
-import org.apache.nifi.annotation.behavior.SideEffectFree;
-import org.apache.nifi.annotation.behavior.SupportsBatching;
-import org.apache.nifi.annotation.behavior.TriggerSerially;
-import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
-import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-
 import static java.util.Objects.requireNonNull;
 
 import java.util.ArrayList;
@@ -43,6 +35,17 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SideEffectFree;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.TriggerSerially;
+import org.apache.nifi.annotation.behavior.TriggerWhenAnyDestinationAvailable;
+import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.connectable.Connectable;
@@ -61,8 +64,6 @@ import org.apache.nifi.processor.Processor;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.scheduling.SchedulingStrategy;
 import org.apache.nifi.util.FormatUtils;
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.quartz.CronExpression;
 import org.slf4j.LoggerFactory;
 
@@ -73,1185 +74,1242 @@ import org.slf4j.LoggerFactory;
  */
 public class StandardProcessorNode extends ProcessorNode implements Connectable {
 
-    public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
-
-    public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
-    public static final String DEFAULT_YIELD_PERIOD = "1 sec";
-    public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
-    private final AtomicReference<ProcessGroup> processGroup;
-    private final Processor processor;
-    private final AtomicReference<String> identifier;
-    private final Map<Connection, Connectable> destinations;
-    private final Map<Relationship, Set<Connection>> connections;
-    private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
-    private final AtomicReference<List<Connection>> incomingConnectionsRef;
-    private final ReentrantReadWriteLock rwLock;
-    private final Lock readLock;
-    private final Lock writeLock;
-    private final AtomicBoolean isolated;
-    private final AtomicBoolean lossTolerant;
-    private final AtomicReference<ScheduledState> scheduledState;
-    private final AtomicReference<String> comments;
-    private final AtomicReference<String> name;
-    private final AtomicReference<Position> position;
-    private final AtomicReference<String> annotationData;
-    private final AtomicReference<String> schedulingPeriod; // stored as a string so it's presented to the user as they entered it
-    private final AtomicReference<String> yieldPeriod;
-    private final AtomicReference<String> penalizationPeriod;
-    private final AtomicReference<Map<String, String>> style;
-    private final AtomicInteger concurrentTaskCount;
-    private final AtomicLong yieldExpiration;
-    private final AtomicLong schedulingNanos;
-    private final boolean triggerWhenEmpty;
-    private final boolean sideEffectFree;
-    private final boolean triggeredSerially;
-    private final boolean triggerWhenAnyDestinationAvailable;
-    private final boolean eventDrivenSupported;
-    private final boolean batchSupported;
-    private final ValidationContextFactory validationContextFactory;
-    private final ProcessScheduler processScheduler;
-    private long runNanos = 0L;
-
-    private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
-
-    @SuppressWarnings("deprecation")
-    public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
-            final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
-        super(processor, uuid, validationContextFactory, controllerServiceProvider);
-
-        this.processor = processor;
-        identifier = new AtomicReference<>(uuid);
-        destinations = new HashMap<>();
-        connections = new HashMap<>();
-        incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
-        scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
-        rwLock = new ReentrantReadWriteLock(false);
-        readLock = rwLock.readLock();
-        writeLock = rwLock.writeLock();
-        lossTolerant = new AtomicBoolean(false);
-        final Set<Relationship> emptySetOfRelationships = new HashSet<>();
-        undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
-        comments = new AtomicReference<>("");
-        name = new AtomicReference<>(processor.getClass().getSimpleName());
-        schedulingPeriod = new AtomicReference<>("0 sec");
-        schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
-        yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
-        yieldExpiration = new AtomicLong(0L);
-        concurrentTaskCount = new AtomicInteger(1);
-        position = new AtomicReference<>(new Position(0D, 0D));
-        style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
-        this.processGroup = new AtomicReference<>();
-        processScheduler = scheduler;
-        annotationData = new AtomicReference<>();
-        isolated = new AtomicBoolean(false);
-        penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
-
-        final Class<?> procClass = processor.getClass();
-        triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
-        sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
-        batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
-        triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
-        triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
-                || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
-        this.validationContextFactory = validationContextFactory;
-        eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
-                || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
-        schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
-    }
-
-    /**
-     * @return comments about this specific processor instance
-     */
-    @Override
-    public String getComments() {
-        return comments.get();
-    }
-
-    /**
-     * Provides an opportunity to retain information about this particular processor instance
-     *
-     * @param comments new comments
-     */
-    @Override
-    public void setComments(final String comments) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.comments.set(comments);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public ScheduledState getScheduledState() {
-        return scheduledState.get();
-    }
-
-    @Override
-    public Position getPosition() {
-        return position.get();
-    }
-
-    @Override
-    public void setPosition(Position position) {
-        this.position.set(position);
-    }
-
-    @Override
-    public Map<String, String> getStyle() {
-        return style.get();
-    }
-
-    @Override
-    public void setStyle(final Map<String, String> style) {
-        if (style != null) {
-            this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
-        }
-    }
-
-    @Override
-    public String getIdentifier() {
-        return identifier.get();
-    }
-
-    /**
-     * @return if true flow file content generated by this processor is considered loss tolerant
-     */
-    @Override
-    public boolean isLossTolerant() {
-        return lossTolerant.get();
-    }
-
-    @Override
-    public boolean isIsolated() {
-        return isolated.get();
-    }
-
-    /**
-     * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
-     */
-    @Override
-    public boolean isTriggerWhenEmpty() {
-        return triggerWhenEmpty;
-    }
-
-    /**
-     * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
-     */
-    @Override
-    public boolean isSideEffectFree() {
-        return sideEffectFree;
-    }
-
-    @Override
-    public boolean isHighThroughputSupported() {
-        return batchSupported;
-    }
-
-    /**
-     * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
-     */
-    @Override
-    public boolean isTriggerWhenAnyDestinationAvailable() {
-        return triggerWhenAnyDestinationAvailable;
-    }
-
-    /**
-     * Indicates whether flow file content made by this processor must be persisted
-     *
-     * @param lossTolerant tolerant
-     */
-    @Override
-    public void setLossTolerant(final boolean lossTolerant) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.lossTolerant.set(lossTolerant);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Indicates whether the processor runs on only the primary node.
-     *
-     * @param isolated isolated
-     */
-    public void setIsolated(final boolean isolated) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.isolated.set(isolated);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isAutoTerminated(final Relationship relationship) {
-        final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
-        if (terminatable == null) {
-            return false;
-        }
-        return terminatable.contains(relationship);
-    }
-
-    @Override
-    public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-
-            for (final Relationship rel : terminate) {
-                if (!getConnections(rel).isEmpty()) {
-                    throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because Connection already exists with this relationship");
-                }
-            }
-            undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
-     */
-    @Override
-    public Set<Relationship> getAutoTerminatedRelationships() {
-        Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
-        if (relationships == null) {
-            relationships = new HashSet<>();
-        }
-        return Collections.unmodifiableSet(relationships);
-    }
-
-    @Override
-    public String getName() {
-        return name.get();
-    }
-
-    /**
-     * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
-     */
-    @SuppressWarnings("deprecation")
-    public String getProcessorDescription() {
-        CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
-        String description = null;
-        if (capDesc != null) {
-            description = capDesc.value();
-        } else {
-            final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
-                    = processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
-            if (deprecatedCapDesc != null) {
-                description = deprecatedCapDesc.value();
-            }
-        }
-
-        return description;
-    }
-
-    @Override
-    public void setName(final String name) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            this.name.set(name);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @param timeUnit determines the unit of time to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_SCHEDULING_TIME_UNIT}
-     * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
-     */
-    @Override
-    public long getSchedulingPeriod(final TimeUnit timeUnit) {
-        return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
-    }
-
-    @Override
-    public boolean isEventDrivenSupported() {
-        readLock.lock();
-        try {
-            return this.eventDrivenSupported;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    /**
-     * Updates the Scheduling Strategy used for this Processor
-     *
-     * @param schedulingStrategy strategy
-     *
-     * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
-     */
-    @Override
-    public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
-        writeLock.lock();
-        try {
-            if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
-                // not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
-                // it no longer supports EventDriven mode, we don't want the app to fail to start up if it was already in Event-Driven
-                // Mode. Instead, we will simply leave it in Timer-Driven mode
-                return;
-            }
-
-            this.schedulingStrategy = schedulingStrategy;
-            setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * @return the currently configured scheduling strategy
-     */
-    @Override
-    public SchedulingStrategy getSchedulingStrategy() {
-        readLock.lock();
-        try {
-            return this.schedulingStrategy;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public String getSchedulingPeriod() {
-        return schedulingPeriod.get();
-    }
-
-    @Override
-    public void setScheduldingPeriod(final String schedulingPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-
-            switch (schedulingStrategy) {
-                case CRON_DRIVEN: {
-                    try {
-                        new CronExpression(schedulingPeriod);
-                    } catch (final Exception e) {
-                        throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
-                    }
-                }
-                break;
-                case PRIMARY_NODE_ONLY:
-                case TIMER_DRIVEN: {
-                    final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
-                    if (schedulingNanos < 0) {
-                        throw new IllegalArgumentException("Scheduling Period must be positive");
-                    }
-                    this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
-                }
-                break;
-                case EVENT_DRIVEN:
-                default:
-                    return;
-            }
-
-            this.schedulingPeriod.set(schedulingPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public long getRunDuration(final TimeUnit timeUnit) {
-        readLock.lock();
-        try {
-            return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void setRunDuration(final long duration, final TimeUnit timeUnit) {
-        writeLock.lock();
-        try {
-            if (duration < 0) {
-                throw new IllegalArgumentException("Run Duration must be non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
-            }
-
-            this.runNanos = timeUnit.toNanos(duration);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public long getYieldPeriod(final TimeUnit timeUnit) {
-        return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-    }
-
-    @Override
-    public String getYieldPeriod() {
-        return yieldPeriod.get();
-    }
-
-    @Override
-    public void setYieldPeriod(final String yieldPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
-            if (yieldMillis < 0) {
-                throw new IllegalArgumentException("Yield duration must be positive");
-            }
-            this.yieldPeriod.set(yieldPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
-     * methods.
-     */
-    @Override
-    public void yield() {
-        final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
-        yield(yieldMillis, TimeUnit.MILLISECONDS);
-
-        final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
-        LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
-    }
-
-    @Override
-    public void yield(final long period, final TimeUnit timeUnit) {
-        final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
-        yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
-
-        processScheduler.yield(this);
-    }
-
-    /**
-     * @return the number of milliseconds since Epoch at which time this processor is to once again be scheduled.
-     */
-    @Override
-    public long getYieldExpiration() {
-        return yieldExpiration.get();
-    }
-
-    @Override
-    public long getPenalizationPeriod(final TimeUnit timeUnit) {
-        return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
-    }
-
-    @Override
-    public String getPenalizationPeriod() {
-        return penalizationPeriod.get();
-    }
-
-    @Override
-    public void setPenalizationPeriod(final String penalizationPeriod) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
-            if (penalizationMillis < 0) {
-                throw new IllegalArgumentException("Penalization duration must be positive");
-            }
-            this.penalizationPeriod.set(penalizationPeriod);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    /**
-     * Determines the number of concurrent tasks that may be running for this processor.
-     *
-     * @param taskCount a number of concurrent tasks this processor may have running
-     * @throws IllegalArgumentException if the given value is less than 1
-     */
-    @Override
-    public void setMaxConcurrentTasks(final int taskCount) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-            }
-            if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
-                throw new IllegalArgumentException();
-            }
-            if (!triggeredSerially) {
-                concurrentTaskCount.set(taskCount);
-            }
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isTriggeredSerially() {
-        return triggeredSerially;
-    }
-
-    /**
-     * @return the number of tasks that may execute concurrently for this processor
-     */
-    @Override
-    public int getMaxConcurrentTasks() {
-        return concurrentTaskCount.get();
-    }
-
-    @Override
-    public LogLevel getBulletinLevel() {
-        return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
-    }
-
-    @Override
-    public void setBulletinLevel(final LogLevel level) {
-        LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
-    }
-
-    @Override
-    public Set<Connection> getConnections() {
-        final Set<Connection> allConnections = new HashSet<>();
-        readLock.lock();
-        try {
-            for (final Set<Connection> connectionSet : connections.values()) {
-                allConnections.addAll(connectionSet);
-            }
-        } finally {
-            readLock.unlock();
-        }
-
-        return allConnections;
-    }
-
-    @Override
-    public List<Connection> getIncomingConnections() {
-        return incomingConnectionsRef.get();
-    }
-
-    @Override
-    public Set<Connection> getConnections(final Relationship relationship) {
-        final Set<Connection> applicableConnections;
-        readLock.lock();
-        try {
-            applicableConnections = connections.get(relationship);
-        } finally {
-            readLock.unlock();
-        }
-        return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
-    }
-
-    @Override
-    public void addConnection(final Connection connection) {
-        Objects.requireNonNull(connection, "connection cannot be null");
-
-        if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
-            throw new IllegalStateException("Cannot a connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
-        }
-
-        writeLock.lock();
-        try {
-            List<Connection> updatedIncoming = null;
-            if (connection.getDestination().equals(this)) {
-                // don't add the connection twice. This may occur if we have a self-loop because we will be told
-                // to add the connection once because we are the source and again because we are the destination.
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                updatedIncoming = new ArrayList<>(incomingConnections);
-                if (!updatedIncoming.contains(connection)) {
-                    updatedIncoming.add(connection);
-                }
-            }
-
-            if (connection.getSource().equals(this)) {
-                // don't add the connection twice. This may occur if we have a self-loop because we will be told
-                // to add the connection once because we are the source and again because we are the destination.
-                if (!destinations.containsKey(connection)) {
-                    for (final Relationship relationship : connection.getRelationships()) {
-                        final Relationship rel = getRelationship(relationship.getName());
-                        Set<Connection> set = connections.get(rel);
-                        if (set == null) {
-                            set = new HashSet<>();
-                            connections.put(rel, set);
-                        }
-
-                        set.add(connection);
-
-                        destinations.put(connection, connection.getDestination());
-                    }
-
-                    final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-                    if (autoTerminated != null) {
-                        autoTerminated.removeAll(connection.getRelationships());
-                        this.undefinedRelationshipsToTerminate.set(autoTerminated);
-                    }
-                }
-            }
-
-            if (updatedIncoming != null) {
-                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-            }
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean hasIncomingConnection() {
-        return !incomingConnectionsRef.get().isEmpty();
-    }
-
-    @Override
-    public void updateConnection(final Connection connection) throws IllegalStateException {
-        if (requireNonNull(connection).getSource().equals(this)) {
-            writeLock.lock();
-            try {
-                //
-                // update any relationships
-                //
-                // first check if any relations were removed.
-                final List<Relationship> existingRelationships = new ArrayList<>();
-                for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
-                    if (entry.getValue().contains(connection)) {
-                        existingRelationships.add(entry.getKey());
-                    }
-                }
-
-                for (final Relationship rel : connection.getRelationships()) {
-                    if (!existingRelationships.contains(rel)) {
-                        // relationship was removed. Check if this is legal.
-                        final Set<Connection> connectionsForRelationship = getConnections(rel);
-                        if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
-                            // if we are running and we do not terminate undefined relationships and this is the only
-                            // connection that defines the given relationship, and that relationship is required,
-                            // then it is not legal to remove this relationship from this connection.
-                            throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
-                                    + this + ", which is currently running");
-                        }
-                    }
-                }
-
-                // remove the connection from any list that currently contains
-                for (final Set<Connection> list : connections.values()) {
-                    list.remove(connection);
-                }
-
-                // add the connection in for all relationships listed.
-                for (final Relationship rel : connection.getRelationships()) {
-                    Set<Connection> set = connections.get(rel);
-                    if (set == null) {
-                        set = new HashSet<>();
-                        connections.put(rel, set);
-                    }
-                    set.add(connection);
-                }
-
-                // update to the new destination
-                destinations.put(connection, connection.getDestination());
-
-                final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
-                if (autoTerminated != null) {
-                    autoTerminated.removeAll(connection.getRelationships());
-                    this.undefinedRelationshipsToTerminate.set(autoTerminated);
-                }
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (connection.getDestination().equals(this)) {
-            writeLock.lock();
-            try {
-                // update our incoming connections -- we can just remove & re-add the connection to
-                // update the list.
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-                updatedIncoming.remove(connection);
-                updatedIncoming.add(connection);
-                incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-            } finally {
-                writeLock.unlock();
-            }
-        }
-    }
-
-    @Override
-    public void removeConnection(final Connection connection) {
-        boolean connectionRemoved = false;
-
-        if (requireNonNull(connection).getSource().equals(this)) {
-            for (final Relationship relationship : connection.getRelationships()) {
-                final Set<Connection> connectionsForRelationship = getConnections(relationship);
-                if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
-                    throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
-                }
-            }
-
-            writeLock.lock();
-            try {
-                for (final Set<Connection> connectionList : this.connections.values()) {
-                    connectionList.remove(connection);
-                }
-
-                connectionRemoved = (destinations.remove(connection) != null);
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (connection.getDestination().equals(this)) {
-            writeLock.lock();
-            try {
-                final List<Connection> incomingConnections = incomingConnectionsRef.get();
-                if (incomingConnections.contains(connection)) {
-                    final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
-                    updatedIncoming.remove(connection);
-                    incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
-                    return;
-                }
-            } finally {
-                writeLock.unlock();
-            }
-        }
-
-        if (!connectionRemoved) {
-            throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
-        }
-    }
-
-    /**
-     * @param relationshipName name
-     * @return the relationship for this nodes processor for the given name or creates a new relationship for the given name
-     */
-    @Override
-    public Relationship getRelationship(final String relationshipName) {
-        final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
-        Relationship returnRel = specRel;
-
-        final Set<Relationship> relationships;
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            relationships = processor.getRelationships();
-        }
-
-        for (final Relationship rel : relationships) {
-            if (rel.equals(specRel)) {
-                returnRel = rel;
-                break;
-            }
-        }
-        return returnRel;
-    }
-
-    @Override
-    public Processor getProcessor() {
-        return this.processor;
-    }
-
-    /**
-     * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
-     */
-    public Set<Connectable> getDestinations() {
-        final Set<Connectable> nonSelfDestinations = new HashSet<>();
-        readLock.lock();
-        try {
-            for (final Connectable connectable : destinations.values()) {
-                if (connectable != this) {
-                    nonSelfDestinations.add(connectable);
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-        return nonSelfDestinations;
-    }
-
-    public Set<Connectable> getDestinations(final Relationship relationship) {
-        readLock.lock();
-        try {
-            final Set<Connectable> destinationSet = new HashSet<>();
-            final Set<Connection> relationshipConnections = connections.get(relationship);
-            if (relationshipConnections != null) {
-                for (final Connection connection : relationshipConnections) {
-                    destinationSet.add(destinations.get(connection));
-                }
-            }
-            return destinationSet;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    public Set<Relationship> getUndefinedRelationships() {
-        final Set<Relationship> undefined = new HashSet<>();
-        readLock.lock();
-        try {
-            final Set<Relationship> relationships;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                relationships = processor.getRelationships();
-            }
-
-            if (relationships == null) {
-                return undefined;
-            }
-            for (final Relationship relation : relationships) {
-                final Set<Connection> connectionSet = this.connections.get(relation);
-                if (connectionSet == null || connectionSet.isEmpty()) {
-                    undefined.add(relation);
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-        return undefined;
-    }
-
-    /**
-     * Determines if the given node is a destination for this node
-     *
-     * @param node node
-     * @return true if is a direct destination node; false otherwise
-     */
-    boolean isRelated(final ProcessorNode node) {
-        readLock.lock();
-        try {
-            return this.destinations.containsValue(node);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isRunning() {
-        readLock.lock();
-        try {
-            return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public int getActiveThreadCount() {
-        readLock.lock();
-        try {
-            return processScheduler.getActiveThreadCount(this);
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public boolean isValid() {
-        readLock.lock();
-        try {
-            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-            final Collection<ValidationResult> validationResults;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                validationResults = getProcessor().validate(validationContext);
-            }
-
-            for (final ValidationResult result : validationResults) {
-                if (!result.isValid()) {
-                    return false;
-                }
-            }
-
-            for (final Relationship undef : getUndefinedRelationships()) {
-                if (!isAutoTerminated(undef)) {
-                    return false;
-                }
-            }
-        } catch (final Throwable t) {
-            return false;
-        } finally {
-            readLock.unlock();
-        }
-
-        return true;
-    }
-
-    @Override
-    public Collection<ValidationResult> getValidationErrors() {
-        final List<ValidationResult> results = new ArrayList<>();
-        readLock.lock();
-        try {
-            final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
-
-            final Collection<ValidationResult> validationResults;
-            try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-                validationResults = getProcessor().validate(validationContext);
-            }
-
-            for (final ValidationResult result : validationResults) {
-                if (!result.isValid()) {
-                    results.add(result);
-                }
-            }
-
-            for (final Relationship relationship : getUndefinedRelationships()) {
-                if (!isAutoTerminated(relationship)) {
-                    final ValidationResult error = new ValidationResult.Builder()
-                            .explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
-                            .subject("Relationship " + relationship.getName())
-                            .valid(false)
-                            .build();
-                    results.add(error);
-                }
-            }
-        } catch (final Throwable t) {
-            results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
-        } finally {
-            readLock.unlock();
-        }
-        return results;
-    }
-
-    /**
-     * Establishes node equality (based on the processor's identifier)
-     *
-     * @param other node
-     * @return true if equal
-     */
-    @Override
-    public boolean equals(final Object other) {
-        if (!(other instanceof ProcessorNode)) {
-            return false;
-        }
-        final ProcessorNode on = (ProcessorNode) other;
-        return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
-    }
-
-    @Override
-    public int hashCode() {
-        return new HashCodeBuilder(7, 67).append(identifier.get()).toHashCode();
-    }
-
-    @Override
-    public Collection<Relationship> getRelationships() {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            return getProcessor().getRelationships();
-        }
-    }
-
-    @Override
-    public String toString() {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            return getProcessor().toString();
-        }
-    }
-
-    @Override
-    public ProcessGroup getProcessGroup() {
-        return processGroup.get();
-    }
-
-    @Override
-    public void setProcessGroup(final ProcessGroup group) {
-        writeLock.lock();
-        try {
-            this.processGroup.set(group);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
-        try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
-            processor.onTrigger(context, sessionFactory);
-        }
-    }
-
-    @Override
-    public ConnectableType getConnectableType() {
-        return ConnectableType.PROCESSOR;
-    }
-
-    @Override
-    public void setScheduledState(final ScheduledState scheduledState) {
-        this.scheduledState.set(scheduledState);
-        if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
-            yieldExpiration.set(0L);
-        }
-    }
-
-    @Override
-    public void setAnnotationData(final String data) {
-        writeLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException("Cannot set AnnotationData while processor is running");
-            }
-
-            this.annotationData.set(data);
-        } finally {
-            writeLock.unlock();
-        }
-    }
-
-    @Override
-    public String getAnnotationData() {
-        return annotationData.get();
-    }
-
-    @Override
-    public Collection<ValidationResult> validate(final ValidationContext validationContext) {
-        return processor.validate(validationContext);
-    }
-
-    @Override
-    public void verifyCanDelete() throws IllegalStateException {
-        verifyCanDelete(false);
-    }
-
-    @Override
-    public void verifyCanDelete(final boolean ignoreConnections) {
-        readLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException(this + " is running");
-            }
-
-            if (!ignoreConnections) {
-                for (final Set<Connection> connectionSet : connections.values()) {
-                    for (final Connection connection : connectionSet) {
-                        connection.verifyCanDelete();
-                    }
-                }
-
-                for (final Connection connection : incomingConnectionsRef.get()) {
-                    if (connection.getSource().equals(this)) {
-                        connection.verifyCanDelete();
-                    } else {
-                        throw new IllegalStateException(this + " is the destination of another component");
-                    }
-                }
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanStart() {
-        readLock.lock();
-        try {
-            switch (getScheduledState()) {
-                case DISABLED:
-                    throw new IllegalStateException(this + " cannot be started because it is disabled");
-                case RUNNING:
-                    throw new IllegalStateException(this + " cannot be started because it is already running");
-                case STOPPED:
-                    break;
-            }
-            verifyNoActiveThreads();
-
-            if (!isValid()) {
-                throw new IllegalStateException(this + " is not in a valid state");
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
-        switch (getScheduledState()) {
-            case DISABLED:
-                throw new IllegalStateException(this + " cannot be started because it is disabled");
-            case RUNNING:
-                throw new IllegalStateException(this + " cannot be started because it is already running");
-            case STOPPED:
-                break;
-        }
-        verifyNoActiveThreads();
-
-        final Set<String> ids = new HashSet<>();
-        for (final ControllerServiceNode node : ignoredReferences) {
-            ids.add(node.getIdentifier());
-        }
-
-        final Collection<ValidationResult> validationResults = getValidationErrors(ids);
-        for (final ValidationResult result : validationResults) {
-            if (!result.isValid()) {
-                throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
-            }
-        }
-    }
-
-    @Override
-    public void verifyCanStop() {
-        if (getScheduledState() != ScheduledState.RUNNING) {
-            throw new IllegalStateException(this + " is not scheduled to run");
-        }
-    }
-
-    @Override
-    public void verifyCanUpdate() {
-        readLock.lock();
-        try {
-            if (isRunning()) {
-                throw new IllegalStateException(this + " is not stopped");
-            }
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanEnable() {
-        readLock.lock();
-        try {
-            if (getScheduledState() != ScheduledState.DISABLED) {
-                throw new IllegalStateException(this + " is not disabled");
-            }
-
-            verifyNoActiveThreads();
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    @Override
-    public void verifyCanDisable() {
-        readLock.lock();
-        try {
-            if (getScheduledState() != ScheduledState.STOPPED) {
-                throw new IllegalStateException(this + " is not stopped");
-            }
-            verifyNoActiveThreads();
-        } finally {
-            readLock.unlock();
-        }
-    }
-
-    private void verifyNoActiveThreads() throws IllegalStateException {
-        final int threadCount = processScheduler.getActiveThreadCount(this);
-        if (threadCount > 0) {
-            throw new IllegalStateException(this + " has " + threadCount + " threads still active");
-        }
-    }
-
-    @Override
-    public void verifyModifiable() throws IllegalStateException {
-        if (isRunning()) {
-            throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
-        }
-    }
+	public static final String BULLETIN_OBSERVER_ID = "bulletin-observer";
+
+	public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
+	public static final String DEFAULT_YIELD_PERIOD = "1 sec";
+	public static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
+	private final AtomicReference<ProcessGroup> processGroup;
+	private final Processor processor;
+	private final AtomicReference<String> identifier;
+	private final Map<Connection, Connectable> destinations;
+	private final Map<Relationship, Set<Connection>> connections;
+	private final AtomicReference<Set<Relationship>> undefinedRelationshipsToTerminate;
+	private final AtomicReference<List<Connection>> incomingConnectionsRef;
+	private final ReentrantReadWriteLock rwLock;
+	private final Lock readLock;
+	private final Lock writeLock;
+	private final AtomicBoolean isolated;
+	private final AtomicBoolean lossTolerant;
+	private final AtomicReference<ScheduledState> scheduledState;
+	private final AtomicReference<String> comments;
+	private final AtomicReference<String> name;
+	private final AtomicReference<Position> position;
+	private final AtomicReference<String> annotationData;
+	private final AtomicReference<String> schedulingPeriod; // stored as string so it's presented to user as they entered it
+	private final AtomicReference<String> yieldPeriod;
+	private final AtomicReference<String> penalizationPeriod;
+	private final AtomicReference<Map<String, String>> style;
+	private final AtomicInteger concurrentTaskCount;
+	private final AtomicLong yieldExpiration;
+	private final AtomicLong schedulingNanos;
+	private final boolean triggerWhenEmpty;
+	private final boolean sideEffectFree;
+	private final boolean triggeredSerially;
+	private final boolean triggerWhenAnyDestinationAvailable;
+	private final boolean eventDrivenSupported;
+	private final boolean batchSupported;
+	private final Requirement inputRequirement;
+	private final ValidationContextFactory validationContextFactory;
+	private final ProcessScheduler processScheduler;
+	private long runNanos = 0L;
+
+	private SchedulingStrategy schedulingStrategy;  // guarded by read/write lock
+
+	@SuppressWarnings("deprecation")
+	public StandardProcessorNode(final Processor processor, final String uuid, final ValidationContextFactory validationContextFactory,
+		final ProcessScheduler scheduler, final ControllerServiceProvider controllerServiceProvider) {
+		super(processor, uuid, validationContextFactory, controllerServiceProvider);
+
+		this.processor = processor;
+		identifier = new AtomicReference<>(uuid);
+		destinations = new HashMap<>();
+		connections = new HashMap<>();
+		incomingConnectionsRef = new AtomicReference<List<Connection>>(new ArrayList<Connection>());
+		scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
+		rwLock = new ReentrantReadWriteLock(false);
+		readLock = rwLock.readLock();
+		writeLock = rwLock.writeLock();
+		lossTolerant = new AtomicBoolean(false);
+		final Set<Relationship> emptySetOfRelationships = new HashSet<>();
+		undefinedRelationshipsToTerminate = new AtomicReference<>(emptySetOfRelationships);
+		comments = new AtomicReference<>("");
+		name = new AtomicReference<>(processor.getClass().getSimpleName());
+		schedulingPeriod = new AtomicReference<>("0 sec");
+		schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
+		yieldPeriod = new AtomicReference<>(DEFAULT_YIELD_PERIOD);
+		yieldExpiration = new AtomicLong(0L);
+		concurrentTaskCount = new AtomicInteger(1);
+		position = new AtomicReference<>(new Position(0D, 0D));
+		style = new AtomicReference<>(Collections.unmodifiableMap(new HashMap<String, String>()));
+		this.processGroup = new AtomicReference<>();
+		processScheduler = scheduler;
+		annotationData = new AtomicReference<>();
+		isolated = new AtomicBoolean(false);
+		penalizationPeriod = new AtomicReference<>(DEFAULT_PENALIZATION_PERIOD);
+
+		final Class<?> procClass = processor.getClass();
+		triggerWhenEmpty = procClass.isAnnotationPresent(TriggerWhenEmpty.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenEmpty.class);
+		sideEffectFree = procClass.isAnnotationPresent(SideEffectFree.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SideEffectFree.class);
+		batchSupported = procClass.isAnnotationPresent(SupportsBatching.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.SupportsBatching.class);
+		triggeredSerially = procClass.isAnnotationPresent(TriggerSerially.class) || procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerSerially.class);
+		triggerWhenAnyDestinationAvailable = procClass.isAnnotationPresent(TriggerWhenAnyDestinationAvailable.class)
+			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.TriggerWhenAnyDestinationAvailable.class);
+		this.validationContextFactory = validationContextFactory;
+		eventDrivenSupported = (procClass.isAnnotationPresent(EventDriven.class)
+			|| procClass.isAnnotationPresent(org.apache.nifi.processor.annotation.EventDriven.class)) && !triggeredSerially && !triggerWhenEmpty;
+
+		final boolean inputRequirementPresent = procClass.isAnnotationPresent(InputRequirement.class);
+		if (inputRequirementPresent) {
+			inputRequirement = procClass.getAnnotation(InputRequirement.class).value();
+		} else {
+			inputRequirement = Requirement.INPUT_ALLOWED;
+		}
+
+		schedulingStrategy = SchedulingStrategy.TIMER_DRIVEN;
+	}
+
+	/**
+	 * @return comments about this specific processor instance
+	 */
+	@Override
+	public String getComments() {
+		return comments.get();
+	}
+
+	/**
+	 * Provides an opportunity to retain information about this particular processor instance
+	 *
+	 * @param comments new comments
+	 */
+	@Override
+	public void setComments(final String comments) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.comments.set(comments);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public ScheduledState getScheduledState() {
+		return scheduledState.get();
+	}
+
+	@Override
+	public Position getPosition() {
+		return position.get();
+	}
+
+	@Override
+	public void setPosition(Position position) {
+		this.position.set(position);
+	}
+
+	@Override
+	public Map<String, String> getStyle() {
+		return style.get();
+	}
+
+	@Override
+	public void setStyle(final Map<String, String> style) {
+		if (style != null) {
+			this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
+		}
+	}
+
+	@Override
+	public String getIdentifier() {
+		return identifier.get();
+	}
+
+	/**
+	 * @return if true flow file content generated by this processor is considered loss tolerant
+	 */
+	@Override
+	public boolean isLossTolerant() {
+		return lossTolerant.get();
+	}
+
+	@Override
+	public boolean isIsolated() {
+		return isolated.get();
+	}
+
+	/**
+	 * @return true if the processor has the {@link TriggerWhenEmpty} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isTriggerWhenEmpty() {
+		return triggerWhenEmpty;
+	}
+
+	/**
+	 * @return true if the processor has the {@link SideEffectFree} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isSideEffectFree() {
+		return sideEffectFree;
+	}
+
+	@Override
+	public boolean isHighThroughputSupported() {
+		return batchSupported;
+	}
+
+	/**
+	 * @return true if the processor has the {@link TriggerWhenAnyDestinationAvailable} annotation, false otherwise.
+	 */
+	@Override
+	public boolean isTriggerWhenAnyDestinationAvailable() {
+		return triggerWhenAnyDestinationAvailable;
+	}
+
+	/**
+	 * Indicates whether flow file content created by this processor must be persisted
+	 *
+	 * @param lossTolerant tolerant
+	 */
+	@Override
+	public void setLossTolerant(final boolean lossTolerant) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.lossTolerant.set(lossTolerant);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Indicates whether the processor runs on only the primary node.
+	 *
+	 * @param isolated isolated
+	 */
+	public void setIsolated(final boolean isolated) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.isolated.set(isolated);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isAutoTerminated(final Relationship relationship) {
+		final Set<Relationship> terminatable = undefinedRelationshipsToTerminate.get();
+		if (terminatable == null) {
+			return false;
+		}
+		return terminatable.contains(relationship);
+	}
+
+	@Override
+	public void setAutoTerminatedRelationships(final Set<Relationship> terminate) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+
+			for (final Relationship rel : terminate) {
+				if (!getConnections(rel).isEmpty()) {
+					throw new IllegalStateException("Cannot mark relationship '" + rel.getName() + "' as auto-terminated because a Connection already exists with this relationship");
+				}
+			}
+			undefinedRelationshipsToTerminate.set(new HashSet<>(terminate));
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @return an unmodifiable Set that contains all of the ProcessorRelationship objects that are configured to be auto-terminated
+	 */
+	@Override
+	public Set<Relationship> getAutoTerminatedRelationships() {
+		Set<Relationship> relationships = undefinedRelationshipsToTerminate.get();
+		if (relationships == null) {
+			relationships = new HashSet<>();
+		}
+		return Collections.unmodifiableSet(relationships);
+	}
+
+	@Override
+	public String getName() {
+		return name.get();
+	}
+
+	/**
+	 * @return the value of the processor's {@link CapabilityDescription} annotation, if one exists, else <code>null</code>.
+	 */
+	@SuppressWarnings("deprecation")
+	public String getProcessorDescription() {
+		CapabilityDescription capDesc = processor.getClass().getAnnotation(CapabilityDescription.class);
+		String description = null;
+		if (capDesc != null) {
+			description = capDesc.value();
+		} else {
+			final org.apache.nifi.processor.annotation.CapabilityDescription deprecatedCapDesc
+			= processor.getClass().getAnnotation(org.apache.nifi.processor.annotation.CapabilityDescription.class);
+			if (deprecatedCapDesc != null) {
+				description = deprecatedCapDesc.value();
+			}
+		}
+
+		return description;
+	}
+
+	@Override
+	public void setName(final String name) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			this.name.set(name);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @param timeUnit determines the unit of time used to represent the scheduling period. If null, the period will be reported in units of {@link #DEFAULT_TIME_UNIT}
+	 * @return the schedule period that should elapse before subsequent cycles of this processor's tasks
+	 */
+	@Override
+	public long getSchedulingPeriod(final TimeUnit timeUnit) {
+		return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
+	}
+
+	@Override
+	public boolean isEventDrivenSupported() {
+		readLock.lock();
+		try {
+			return this.eventDrivenSupported;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	/**
+	 * Updates the Scheduling Strategy used for this Processor
+	 *
+	 * @param schedulingStrategy strategy
+	 *
+	 * @throws IllegalArgumentException if the SchedulingStrategy is not applicable for this Processor
+	 */
+	@Override
+	public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
+		writeLock.lock();
+		try {
+			if (schedulingStrategy == SchedulingStrategy.EVENT_DRIVEN && !eventDrivenSupported) {
+				// not valid. Just ignore it. We don't throw an Exception because if a developer changes a Processor so that
+				// it no longer supports EventDriven mode, we don't want the app to fail to start up if it was already in Event-Driven
+				// Mode. Instead, we will simply leave it in Timer-Driven mode
+				return;
+			}
+
+			this.schedulingStrategy = schedulingStrategy;
+			setIsolated(schedulingStrategy == SchedulingStrategy.PRIMARY_NODE_ONLY);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * @return the currently configured scheduling strategy
+	 */
+	@Override
+	public SchedulingStrategy getSchedulingStrategy() {
+		readLock.lock();
+		try {
+			return this.schedulingStrategy;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public String getSchedulingPeriod() {
+		return schedulingPeriod.get();
+	}
+
+	@Override
+	public void setScheduldingPeriod(final String schedulingPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+
+			switch (schedulingStrategy) {
+				case CRON_DRIVEN: {
+					try {
+						new CronExpression(schedulingPeriod);
+					} catch (final Exception e) {
+						throw new IllegalArgumentException("Scheduling Period is not a valid cron expression: " + schedulingPeriod);
+					}
+				}
+				break;
+				case PRIMARY_NODE_ONLY:
+				case TIMER_DRIVEN: {
+					final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
+					if (schedulingNanos < 0) {
+						throw new IllegalArgumentException("Scheduling Period must be positive");
+					}
+					this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
+				}
+				break;
+				case EVENT_DRIVEN:
+				default:
+					return;
+			}
+
+			this.schedulingPeriod.set(schedulingPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public long getRunDuration(final TimeUnit timeUnit) {
+		readLock.lock();
+		try {
+			return timeUnit.convert(this.runNanos, TimeUnit.NANOSECONDS);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void setRunDuration(final long duration, final TimeUnit timeUnit) {
+		writeLock.lock();
+		try {
+			if (duration < 0) {
+				throw new IllegalArgumentException("Run Duration must be a non-negative value; cannot set to " + timeUnit.toSeconds(duration) + " seconds");
+			}
+
+			this.runNanos = timeUnit.toNanos(duration);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public long getYieldPeriod(final TimeUnit timeUnit) {
+		return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+	}
+
+	@Override
+	public String getYieldPeriod() {
+		return yieldPeriod.get();
+	}
+
+	@Override
+	public void setYieldPeriod(final String yieldPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
+			if (yieldMillis < 0) {
+				throw new IllegalArgumentException("Yield duration must be positive");
+			}
+			this.yieldPeriod.set(yieldPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Causes the processor not to be scheduled for some period of time. This duration can be obtained and set via the {@link #getYieldPeriod(TimeUnit)} and {@link #setYieldPeriod(String)}
+	 * methods.
+	 */
+	@Override
+	public void yield() {
+		final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
+		yield(yieldMillis, TimeUnit.MILLISECONDS);
+
+		final String yieldDuration = (yieldMillis > 1000) ? (yieldMillis / 1000) + " seconds" : yieldMillis + " milliseconds";
+		LoggerFactory.getLogger(processor.getClass()).debug("{} has chosen to yield its resources; will not be scheduled to run again for {}", processor, yieldDuration);
+	}
+
+	@Override
+	public void yield(final long period, final TimeUnit timeUnit) {
+		final long yieldMillis = TimeUnit.MILLISECONDS.convert(period, timeUnit);
+		yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
+
+		processScheduler.yield(this);
+	}
+
+	/**
+	 * @return the time, in milliseconds since the Epoch, at which this processor is to once again be scheduled.
+	 */
+	@Override
+	public long getYieldExpiration() {
+		return yieldExpiration.get();
+	}
+
+	@Override
+	public long getPenalizationPeriod(final TimeUnit timeUnit) {
+		return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+	}
+
+	@Override
+	public String getPenalizationPeriod() {
+		return penalizationPeriod.get();
+	}
+
+	@Override
+	public void setPenalizationPeriod(final String penalizationPeriod) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			final long penalizationMillis = FormatUtils.getTimeDuration(requireNonNull(penalizationPeriod), TimeUnit.MILLISECONDS);
+			if (penalizationMillis < 0) {
+				throw new IllegalArgumentException("Penalization duration must be positive");
+			}
+			this.penalizationPeriod.set(penalizationPeriod);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	/**
+	 * Determines the number of concurrent tasks that may be running for this processor.
+	 *
+	 * @param taskCount a number of concurrent tasks this processor may have running
+	 * @throws IllegalArgumentException if the given value is less than 1
+	 */
+	@Override
+	public void setMaxConcurrentTasks(final int taskCount) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+			}
+			if (taskCount < 1 && getSchedulingStrategy() != SchedulingStrategy.EVENT_DRIVEN) {
+				throw new IllegalArgumentException();
+			}
+			if (!triggeredSerially) {
+				concurrentTaskCount.set(taskCount);
+			}
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isTriggeredSerially() {
+		return triggeredSerially;
+	}
+
+	/**
+	 * @return the number of tasks that may execute concurrently for this processor
+	 */
+	@Override
+	public int getMaxConcurrentTasks() {
+		return concurrentTaskCount.get();
+	}
+
+	@Override
+	public LogLevel getBulletinLevel() {
+		return LogRepositoryFactory.getRepository(getIdentifier()).getObservationLevel(BULLETIN_OBSERVER_ID);
+	}
+
+	@Override
+	public void setBulletinLevel(final LogLevel level) {
+		LogRepositoryFactory.getRepository(getIdentifier()).setObservationLevel(BULLETIN_OBSERVER_ID, level);
+	}
+
+	@Override
+	public Set<Connection> getConnections() {
+		final Set<Connection> allConnections = new HashSet<>();
+		readLock.lock();
+		try {
+			for (final Set<Connection> connectionSet : connections.values()) {
+				allConnections.addAll(connectionSet);
+			}
+		} finally {
+			readLock.unlock();
+		}
+
+		return allConnections;
+	}
+
+	@Override
+	public List<Connection> getIncomingConnections() {
+		return incomingConnectionsRef.get();
+	}
+
+	@Override
+	public Set<Connection> getConnections(final Relationship relationship) {
+		final Set<Connection> applicableConnections;
+		readLock.lock();
+		try {
+			applicableConnections = connections.get(relationship);
+		} finally {
+			readLock.unlock();
+		}
+		return (applicableConnections == null) ? Collections.<Connection>emptySet() : Collections.unmodifiableSet(applicableConnections);
+	}
+
+	@Override
+	public void addConnection(final Connection connection) {
+		Objects.requireNonNull(connection, "connection cannot be null");
+
+		if (!connection.getSource().equals(this) && !connection.getDestination().equals(this)) {
+			throw new IllegalStateException("Cannot add a Connection to a ProcessorNode for which the ProcessorNode is neither the Source nor the Destination");
+		}
+
+		writeLock.lock();
+		try {
+			List<Connection> updatedIncoming = null;
+			if (connection.getDestination().equals(this)) {
+				// don't add the connection twice. This may occur if we have a self-loop because we will be told
+				// to add the connection once because we are the source and again because we are the destination.
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				updatedIncoming = new ArrayList<>(incomingConnections);
+				if (!updatedIncoming.contains(connection)) {
+					updatedIncoming.add(connection);
+				}
+			}
+
+			if (connection.getSource().equals(this)) {
+				// don't add the connection twice. This may occur if we have a self-loop because we will be told
+				// to add the connection once because we are the source and again because we are the destination.
+				if (!destinations.containsKey(connection)) {
+					for (final Relationship relationship : connection.getRelationships()) {
+						final Relationship rel = getRelationship(relationship.getName());
+						Set<Connection> set = connections.get(rel);
+						if (set == null) {
+							set = new HashSet<>();
+							connections.put(rel, set);
+						}
+
+						set.add(connection);
+
+						destinations.put(connection, connection.getDestination());
+					}
+
+					final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+					if (autoTerminated != null) {
+						autoTerminated.removeAll(connection.getRelationships());
+						this.undefinedRelationshipsToTerminate.set(autoTerminated);
+					}
+				}
+			}
+
+			if (updatedIncoming != null) {
+				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+			}
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean hasIncomingConnection() {
+		return !incomingConnectionsRef.get().isEmpty();
+	}
+
+	@Override
+	public void updateConnection(final Connection connection) throws IllegalStateException {
+		if (requireNonNull(connection).getSource().equals(this)) {
+			writeLock.lock();
+			try {
+				//
+				// update any relationships
+				//
+				// first check if any relations were removed.
+				final List<Relationship> existingRelationships = new ArrayList<>();
+				for (final Map.Entry<Relationship, Set<Connection>> entry : connections.entrySet()) {
+					if (entry.getValue().contains(connection)) {
+						existingRelationships.add(entry.getKey());
+					}
+				}
+
+				for (final Relationship rel : connection.getRelationships()) {
+					if (!existingRelationships.contains(rel)) {
+						// relationship was removed. Check if this is legal.
+						final Set<Connection> connectionsForRelationship = getConnections(rel);
+						if (connectionsForRelationship != null && connectionsForRelationship.size() == 1 && this.isRunning() && !isAutoTerminated(rel) && getRelationships().contains(rel)) {
+							// if we are running and we do not terminate undefined relationships and this is the only
+							// connection that defines the given relationship, and that relationship is required,
+							// then it is not legal to remove this relationship from this connection.
+							throw new IllegalStateException("Cannot remove relationship " + rel.getName() + " from Connection because doing so would invalidate Processor "
+								+ this + ", which is currently running");
+						}
+					}
+				}
+
+				// remove the connection from any list that currently contains it
+				for (final Set<Connection> list : connections.values()) {
+					list.remove(connection);
+				}
+
+				// add the connection in for all relationships listed.
+				for (final Relationship rel : connection.getRelationships()) {
+					Set<Connection> set = connections.get(rel);
+					if (set == null) {
+						set = new HashSet<>();
+						connections.put(rel, set);
+					}
+					set.add(connection);
+				}
+
+				// update to the new destination
+				destinations.put(connection, connection.getDestination());
+
+				final Set<Relationship> autoTerminated = this.undefinedRelationshipsToTerminate.get();
+				if (autoTerminated != null) {
+					autoTerminated.removeAll(connection.getRelationships());
+					this.undefinedRelationshipsToTerminate.set(autoTerminated);
+				}
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (connection.getDestination().equals(this)) {
+			writeLock.lock();
+			try {
+				// update our incoming connections -- we can just remove & re-add the connection to
+				// update the list.
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+				updatedIncoming.remove(connection);
+				updatedIncoming.add(connection);
+				incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+			} finally {
+				writeLock.unlock();
+			}
+		}
+	}
+
+	@Override
+	public void removeConnection(final Connection connection) {
+		boolean connectionRemoved = false;
+
+		if (requireNonNull(connection).getSource().equals(this)) {
+			for (final Relationship relationship : connection.getRelationships()) {
+				final Set<Connection> connectionsForRelationship = getConnections(relationship);
+				if ((connectionsForRelationship == null || connectionsForRelationship.size() <= 1) && isRunning()) {
+					throw new IllegalStateException("This connection cannot be removed because its source is running and removing it will invalidate this processor");
+				}
+			}
+
+			writeLock.lock();
+			try {
+				for (final Set<Connection> connectionList : this.connections.values()) {
+					connectionList.remove(connection);
+				}
+
+				connectionRemoved = (destinations.remove(connection) != null);
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (connection.getDestination().equals(this)) {
+			writeLock.lock();
+			try {
+				final List<Connection> incomingConnections = incomingConnectionsRef.get();
+				if (incomingConnections.contains(connection)) {
+					final List<Connection> updatedIncoming = new ArrayList<>(incomingConnections);
+					updatedIncoming.remove(connection);
+					incomingConnectionsRef.set(Collections.unmodifiableList(updatedIncoming));
+					return;
+				}
+			} finally {
+				writeLock.unlock();
+			}
+		}
+
+		if (!connectionRemoved) {
+			throw new IllegalArgumentException("Cannot remove a connection from a ProcessorNode for which the ProcessorNode is not the Source");
+		}
+	}
+
+	/**
+	 * @param relationshipName name
+	 * @return the relationship for this node's processor with the given name, or a new relationship created for the given name
+	 */
+	@Override
+	public Relationship getRelationship(final String relationshipName) {
+		final Relationship specRel = new Relationship.Builder().name(relationshipName).build();
+		Relationship returnRel = specRel;
+
+		final Set<Relationship> relationships;
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			relationships = processor.getRelationships();
+		}
+
+		for (final Relationship rel : relationships) {
+			if (rel.equals(specRel)) {
+				returnRel = rel;
+				break;
+			}
+		}
+		return returnRel;
+	}
+
+	@Override
+	public Processor getProcessor() {
+		return this.processor;
+	}
+
+	/**
+	 * @return the Set of destination processors for all relationships excluding any destinations that are this processor itself (self-loops)
+	 */
+	public Set<Connectable> getDestinations() {
+		final Set<Connectable> nonSelfDestinations = new HashSet<>();
+		readLock.lock();
+		try {
+			for (final Connectable connectable : destinations.values()) {
+				if (connectable != this) {
+					nonSelfDestinations.add(connectable);
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+		return nonSelfDestinations;
+	}
+
+	public Set<Connectable> getDestinations(final Relationship relationship) {
+		readLock.lock();
+		try {
+			final Set<Connectable> destinationSet = new HashSet<>();
+			final Set<Connection> relationshipConnections = connections.get(relationship);
+			if (relationshipConnections != null) {
+				for (final Connection connection : relationshipConnections) {
+					destinationSet.add(destinations.get(connection));
+				}
+			}
+			return destinationSet;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	public Set<Relationship> getUndefinedRelationships() {
+		final Set<Relationship> undefined = new HashSet<>();
+		readLock.lock();
+		try {
+			final Set<Relationship> relationships;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				relationships = processor.getRelationships();
+			}
+
+			if (relationships == null) {
+				return undefined;
+			}
+			for (final Relationship relation : relationships) {
+				final Set<Connection> connectionSet = this.connections.get(relation);
+				if (connectionSet == null || connectionSet.isEmpty()) {
+					undefined.add(relation);
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+		return undefined;
+	}
+
+	/**
+	 * Determines if the given node is a destination for this node
+	 *
+	 * @param node node
+	 * @return true if the given node is a direct destination of this node; false otherwise
+	 */
+	boolean isRelated(final ProcessorNode node) {
+		readLock.lock();
+		try {
+			return this.destinations.containsValue(node);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isRunning() {
+		readLock.lock();
+		try {
+			return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public int getActiveThreadCount() {
+		readLock.lock();
+		try {
+			return processScheduler.getActiveThreadCount(this);
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public boolean isValid() {
+		readLock.lock();
+		try {
+			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+			final Collection<ValidationResult> validationResults;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				validationResults = getProcessor().validate(validationContext);
+			}
+
+			for (final ValidationResult result : validationResults) {
+				if (!result.isValid()) {
+					return false;
+				}
+			}
+
+			for (final Relationship undef : getUndefinedRelationships()) {
+				if (!isAutoTerminated(undef)) {
+					return false;
+				}
+			}
+
+			switch (getInputRequirement()) {
+				case INPUT_ALLOWED:
+					break;
+				case INPUT_FORBIDDEN: {
+					if (!getIncomingConnections().isEmpty()) {
+						return false;
+					}
+					break;
+				}
+				case INPUT_REQUIRED: {
+					if (getIncomingConnections().isEmpty()) {
+						return false;
+					}
+					break;
+				}
+			}
+		} catch (final Throwable t) {
+			return false;
+		} finally {
+			readLock.unlock();
+		}
+
+		return true;
+	}
+
+	@Override
+	public Collection<ValidationResult> getValidationErrors() {
+		final List<ValidationResult> results = new ArrayList<>();
+		readLock.lock();
+		try {
+			final ValidationContext validationContext = validationContextFactory.newValidationContext(getProperties(), getAnnotationData());
+
+			final Collection<ValidationResult> validationResults;
+			try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+				validationResults = getProcessor().validate(validationContext);
+			}
+
+			for (final ValidationResult result : validationResults) {
+				if (!result.isValid()) {
+					results.add(result);
+				}
+			}
+
+			for (final Relationship relationship : getUndefinedRelationships()) {
+				if (!isAutoTerminated(relationship)) {
+					final ValidationResult error = new ValidationResult.Builder()
+						.explanation("Relationship '" + relationship.getName() + "' is not connected to any component and is not auto-terminated")
+						.subject("Relationship " + relationship.getName())
+						.valid(false)
+						.build();
+					results.add(error);
+				}
+			}
+
+			switch (getInputRequirement()) {
+				case INPUT_ALLOWED:
+					break;
+				case INPUT_FORBIDDEN: {
+					final int incomingConnCount = getIncomingConnections().size();
+					if (incomingConnCount != 0) {
+						results.add(new ValidationResult.Builder()
+							.explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
+							.subject("Incoming Connections")
+							.valid(false)
+							.build());
+					}
+					break;
+				}
+				case INPUT_REQUIRED: {
+					if (getIncomingConnections().isEmpty()) {
+						results.add(new ValidationResult.Builder()
+							.explanation("Processor requires at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
+							.subject("Incoming Connections")
+							.valid(false)
+							.build());
+					}
+					break;
+				}
+			}
+		} catch (final Throwable t) {
+			results.add(new ValidationResult.Builder().explanation("Failed to run validation due to " + t.toString()).valid(false).build());
+		} finally {
+			readLock.unlock();
+		}
+		return results;
+	}
+
+	@Override
+	public Requirement getInputRequirement() {
+		return inputRequirement;
+	}
+
+	/**
+	 * Establishes node equality (based on the processor's identifier)
+	 *
+	 * @param other node
+	 * @return true if equal
+	 */
+	@Override
+	public boolean equals(final Object other) {
+		if (!(other instanceof ProcessorNode)) {
+			return false;
+		}
+		final ProcessorNode on = (ProcessorNode) other;
+		return new EqualsBuilder().append(identifier.get(), on.getIdentifier()).isEquals();
+	}
+
+	@Override
+	public int hashCode() {
+		return new HashCodeBuilder(7, 67).append(identifier.get()).toHashCode();
+	}
+
+	@Override
+	public Collection<Relationship> getRelationships() {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			return getProcessor().getRelationships();
+		}
+	}
+
+	@Override
+	public String toString() {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			return getProcessor().toString();
+		}
+	}
+
+	@Override
+	public ProcessGroup getProcessGroup() {
+		return processGroup.get();
+	}
+
+	@Override
+	public void setProcessGroup(final ProcessGroup group) {
+		writeLock.lock();
+		try {
+			this.processGroup.set(group);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) {
+		try (final NarCloseable narCloseable = NarCloseable.withNarLoader()) {
+			processor.onTrigger(context, sessionFactory);
+		}
+	}
+
+	@Override
+	public ConnectableType getConnectableType() {
+		return ConnectableType.PROCESSOR;
+	}
+
+	@Override
+	public void setScheduledState(final ScheduledState scheduledState) {
+		this.scheduledState.set(scheduledState);
+		if (!scheduledState.equals(ScheduledState.RUNNING)) {   // if user stops processor, clear yield expiration
+			yieldExpiration.set(0L);
+		}
+	}
+
+	@Override
+	public void setAnnotationData(final String data) {
+		writeLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException("Cannot set AnnotationData while processor is running");
+			}
+
+			this.annotationData.set(data);
+		} finally {
+			writeLock.unlock();
+		}
+	}
+
+	@Override
+	public String getAnnotationData() {
+		return annotationData.get();
+	}
+
+	@Override
+	public Collection<ValidationResult> validate(final ValidationContext validationContext) {
+		return getValidationErrors();
+	}
+
+	@Override
+	public void verifyCanDelete() throws IllegalStateException {
+		verifyCanDelete(false);
+	}
+
+	@Override
+	public void verifyCanDelete(final boolean ignoreConnections) {
+		readLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException(this + " is running");
+			}
+
+			if (!ignoreConnections) {
+				for (final Set<Connection> connectionSet : connections.values()) {
+					for (final Connection connection : connectionSet) {
+						connection.verifyCanDelete();
+					}
+				}
+
+				for (final Connection connection : incomingConnectionsRef.get()) {
+					if (connection.getSource().equals(this)) {
+						connection.verifyCanDelete();
+					} else {
+						throw new IllegalStateException(this + " is the destination of another component");
+					}
+				}
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanStart() {
+		readLock.lock();
+		try {
+			switch (getScheduledState()) {
+				case DISABLED:
+					throw new IllegalStateException(this + " cannot be started because it is disabled");
+				case RUNNING:
+					throw new IllegalStateException(this + " cannot be started because it is already running");
+				case STOPPED:
+					break;
+			}
+			verifyNoActiveThreads();
+
+			if (!isValid()) {
+				throw new IllegalStateException(this + " is not in a valid state");
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
+		switch (getScheduledState()) {
+			case DISABLED:
+				throw new IllegalStateException(this + " cannot be started because it is disabled");
+			case RUNNING:
+				throw new IllegalStateException(this + " cannot be started because it is already running");
+			case STOPPED:
+				break;
+		}
+		verifyNoActiveThreads();
+
+		final Set<String> ids = new HashSet<>();
+		for (final ControllerServiceNode node : ignoredReferences) {
+			ids.add(node.getIdentifier());
+		}
+
+		final Collection<ValidationResult> validationResults = getValidationErrors(ids);
+		for (final ValidationResult result : validationResults) {
+			if (!result.isValid()) {
+				throw new IllegalStateException(this + " cannot be started because it is not valid: " + result);
+			}
+		}
+	}
+
+	@Override
+	public void verifyCanStop() {
+		if (getScheduledState() != ScheduledState.RUNNING) {
+			throw new IllegalStateException(this + " is not scheduled to run");
+		}
+	}
+
+	@Override
+	public void verifyCanUpdate() {
+		readLock.lock();
+		try {
+			if (isRunning()) {
+				throw new IllegalStateException(this + " is not stopped");
+			}
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanEnable() {
+		readLock.lock();
+		try {
+			if (getScheduledState() != ScheduledState.DISABLED) {
+				throw new IllegalStateException(this + " is not disabled");
+			}
+
+			verifyNoActiveThreads();
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	@Override
+	public void verifyCanDisable() {
+		readLock.lock();
+		try {
+			if (getScheduledState() != ScheduledState.STOPPED) {
+				throw new IllegalStateException(this + " is not stopped");
+			}
+			verifyNoActiveThreads();
+		} finally {
+			readLock.unlock();
+		}
+	}
+
+	private void verifyNoActiveThreads() throws IllegalStateException {
+		final int threadCount = processScheduler.getActiveThreadCount(this);
+		if (threadCount > 0) {
+			throw new IllegalStateException(this + " has " + threadCount + " threads still active");
+		}
+	}
+
+	@Override
+	public void verifyModifiable() throws IllegalStateException {
+		if (isRunning()) {
+			throw new IllegalStateException("Cannot modify Processor configuration while the Processor is running");
+		}
+	}
 }
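
The most consequential change in the StandardProcessorNode rewrite above is the constructor's handling of the new annotation: when a Processor class does not carry @InputRequirement, the node defaults to INPUT_ALLOWED so that existing Processors keep their current behavior. As a standalone sketch (not part of the patch), that lookup reduces to:

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

    final class InputRequirementResolver {

        // Mirrors the constructor logic above: use the class-level annotation when
        // present, otherwise fall back to INPUT_ALLOWED.
        static Requirement resolve(final Class<?> procClass) {
            final InputRequirement annotation = procClass.getAnnotation(InputRequirement.class);
            return (annotation == null) ? Requirement.INPUT_ALLOWED : annotation.value();
        }

        private InputRequirementResolver() {
        }
    }

The same Requirement value then drives both isValid() and getValidationErrors(): INPUT_FORBIDDEN fails validation when incoming connections exist, and INPUT_REQUIRED fails when none do.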

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java b/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
index b766878..eccff79 100644
--- a/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
+++ b/nifi-nar-bundles/nifi-geo-bundle/nifi-geo-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
@@ -31,6 +31,8 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -59,6 +61,7 @@ import com.maxmind.geoip2.record.Subdivision;
 @SideEffectFree
 @SupportsBatching
 @Tags({"geo", "enrich", "ip", "maxmind"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Looks up geolocation information for an IP address and adds the geo information to FlowFile attributes. The "
         + "geo data is provided as a MaxMind database. The attribute that contains the IP address to lookup is provided by the "
         + "'IP Address Attribute' property. If the name of the attribute provided is 'X', then the the attributes added by enrichment "


[11/17] nifi git commit: NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree

Posted by ma...@apache.org.
NIFI-810: Created RequiresInput annotation and ensure that processors are invalid if connections do not agree


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/4afd8f88
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/4afd8f88
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/4afd8f88

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 4afd8f88f8a34cf87f2a06221667166a54c99a15
Parents: 31fba6b
Author: Mark Payne <ma...@hotmail.com>
Authored: Fri Sep 25 11:39:28 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:26:14 2015 -0400

----------------------------------------------------------------------
 .../annotation/behavior/InputRequirement.java   |   51 +
 .../src/main/asciidoc/developer-guide.adoc      |   11 +
 .../nifi/processors/avro/ConvertAvroToJSON.java |    3 +
 .../processors/avro/ExtractAvroMetadata.java    |   29 +-
 .../apache/nifi/processors/avro/SplitAvro.java  |   27 +-
 .../nifi/processors/aws/s3/FetchS3Object.java   |    3 +
 .../nifi/processors/aws/s3/PutS3Object.java     |    6 +-
 .../apache/nifi/processors/aws/sns/PutSNS.java  |    3 +
 .../nifi/processors/aws/sqs/DeleteSQS.java      |    3 +
 .../apache/nifi/processors/aws/sqs/GetSQS.java  |    5 +-
 .../apache/nifi/processors/aws/sqs/PutSQS.java  |    5 +-
 .../nifi/processors/flume/ExecuteFlumeSink.java |   14 +-
 .../processors/flume/ExecuteFlumeSource.java    |   14 +-
 .../apache/nifi/controller/ProcessorNode.java   |   89 +-
 .../nifi/controller/StandardProcessorNode.java  | 2440 +++++++++---------
 .../org/apache/nifi/processors/GeoEnrichIP.java |    3 +
 .../hadoop/CreateHadoopSequenceFile.java        |    4 +-
 .../nifi/processors/hadoop/FetchHDFS.java       |    3 +
 .../apache/nifi/processors/hadoop/GetHDFS.java  |    3 +
 .../apache/nifi/processors/hadoop/ListHDFS.java |    3 +
 .../apache/nifi/processors/hadoop/PutHDFS.java  |    3 +
 .../processors/hl7/ExtractHL7Attributes.java    |    3 +
 .../apache/nifi/processors/hl7/RouteHL7.java    |    3 +
 .../processors/image/ExtractImageMetadata.java  |   36 +-
 .../nifi/processors/image/ResizeImage.java      |   38 +-
 .../apache/nifi/processors/kafka/GetKafka.java  |   21 +-
 .../apache/nifi/processors/kafka/PutKafka.java  |   10 +-
 .../nifi/processors/kite/ConvertCSVToAvro.java  |   16 +-
 .../nifi/processors/kite/ConvertJSONToAvro.java |   14 +-
 .../processors/kite/StoreInKiteDataset.java     |    9 +-
 .../nifi/processors/yandex/YandexTranslate.java |    3 +
 .../nifi-pcap-processors/.gitignore             |    1 +
 .../nifi/processors/twitter/GetTwitter.java     |    5 +-
 .../apache/nifi/processors/solr/GetSolr.java    |   43 +-
 .../processors/solr/PutSolrContentStream.java   |   33 +-
 .../standard/Base64EncodeContent.java           |  171 +-
 .../processors/standard/CompressContent.java    |   15 +-
 .../nifi/processors/standard/ControlRate.java   |  683 ++---
 .../standard/ConvertCharacterSet.java           |    3 +
 .../processors/standard/ConvertJSONToSQL.java   |    3 +
 .../processors/standard/DetectDuplicate.java    |    3 +
 .../processors/standard/DistributeLoad.java     |    3 +
 .../processors/standard/DuplicateFlowFile.java  |    3 +
 .../nifi/processors/standard/EncodeContent.java |   15 +-
 .../processors/standard/EncryptContent.java     |    3 +
 .../processors/standard/EvaluateJsonPath.java   |   38 +-
 .../nifi/processors/standard/EvaluateXPath.java |   29 +-
 .../processors/standard/EvaluateXQuery.java     |   25 +-
 .../processors/standard/ExecuteProcess.java     |    3 +
 .../nifi/processors/standard/ExecuteSQL.java    |    3 +
 .../standard/ExecuteStreamCommand.java          |    7 +-
 .../nifi/processors/standard/ExtractText.java   |    3 +
 .../processors/standard/GenerateFlowFile.java   |   11 +-
 .../apache/nifi/processors/standard/GetFTP.java |   13 +-
 .../nifi/processors/standard/GetFile.java       |    7 +-
 .../nifi/processors/standard/GetHTTP.java       |    3 +
 .../nifi/processors/standard/GetJMSQueue.java   |    3 +
 .../nifi/processors/standard/GetJMSTopic.java   |    3 +
 .../nifi/processors/standard/GetSFTP.java       |    7 +-
 .../processors/standard/HandleHttpRequest.java  |    7 +-
 .../processors/standard/HandleHttpResponse.java |    5 +-
 .../nifi/processors/standard/HashAttribute.java |    5 +-
 .../nifi/processors/standard/HashContent.java   |    5 +-
 .../processors/standard/IdentifyMimeType.java   |    5 +-
 .../nifi/processors/standard/InvokeHTTP.java    |    3 +
 .../nifi/processors/standard/ListenHTTP.java    |   16 +-
 .../nifi/processors/standard/ListenUDP.java     |   18 +-
 .../nifi/processors/standard/LogAttribute.java  |   16 +-
 .../nifi/processors/standard/MergeContent.java  |   11 +-
 .../nifi/processors/standard/ModifyBytes.java   |   14 +-
 .../processors/standard/MonitorActivity.java    |   31 +-
 .../nifi/processors/standard/PostHTTP.java      |    3 +
 .../nifi/processors/standard/PutEmail.java      |    3 +
 .../apache/nifi/processors/standard/PutFTP.java |    3 +
 .../nifi/processors/standard/PutFile.java       |    3 +
 .../apache/nifi/processors/standard/PutJMS.java |    5 +-
 .../nifi/processors/standard/PutSFTP.java       |    3 +
 .../apache/nifi/processors/standard/PutSQL.java |    3 +
 .../nifi/processors/standard/ReplaceText.java   |   54 +-
 .../standard/ReplaceTextWithMapping.java        |   18 +-
 .../processors/standard/RouteOnAttribute.java   |    3 +
 .../processors/standard/RouteOnContent.java     |   19 +-
 .../nifi/processors/standard/ScanAttribute.java |   19 +-
 .../nifi/processors/standard/ScanContent.java   |    5 +-
 .../processors/standard/SegmentContent.java     |    7 +-
 .../nifi/processors/standard/SplitContent.java  |    7 +-
 .../nifi/processors/standard/SplitJson.java     |   32 +-
 .../nifi/processors/standard/SplitText.java     |   53 +-
 .../nifi/processors/standard/SplitXml.java      |   18 +-
 .../nifi/processors/standard/TransformXml.java  |    3 +
 .../nifi/processors/standard/UnpackContent.java |    9 +-
 .../nifi/processors/standard/ValidateXml.java   |   16 +-
 .../processors/attributes/UpdateAttribute.java  |    9 +-
 93 files changed, 2418 insertions(+), 2027 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
----------------------------------------------------------------------
diff --git a/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
new file mode 100644
index 0000000..97e6b88
--- /dev/null
+++ b/nifi-api/src/main/java/org/apache/nifi/annotation/behavior/InputRequirement.java
@@ -0,0 +1,51 @@
+package org.apache.nifi.annotation.behavior;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * <p>
+ * Marker annotation that a Processor can use to indicate whether it accepts, requires, or forbids
+ * input from other Processors. This information is used by the framework in order to ensure that
+ * a Processor is marked as invalid if it is missing necessary input or has input that will be ignored.
+ * This information is also used by the NiFi UI in order to prevent users from making connections
+ * to Processors for which they don't make sense.
+ * </p>
+ */
+@Documented
+@Target({ElementType.TYPE})
+@Retention(RetentionPolicy.RUNTIME)
+@Inherited
+public @interface InputRequirement {
+	Requirement value();
+	
+	public static enum Requirement {
+		/**
+		 * This value is used to indicate that the Processor requires input from other Processors
+		 * in order to run. As a result, the Processor will not be valid if it does not have any
+		 * incoming connections.
+		 */
+		INPUT_REQUIRED,
+		
+		/**
+		 * This value is used to indicate that the Processor will consume data from an incoming
+		 * connection but does not require an incoming connection in order to perform its task.
+		 * If the {@link InputRequirement} annotation is not present, this is the default value
+		 * that is used.
+		 */
+		INPUT_ALLOWED,
+		
+		/**
+		 * This value is used to indicate that the Processor is a "Source Processor" and does
+		 * not accept incoming connections. Because the Processor does not pull FlowFiles from
+		 * an incoming connection, it can be very confusing for users who create incoming connections
+		 * to the Processor. As a result, this value can be used in order to clarify that incoming
+		 * connections will not be used. This prevents the user from even creating such a connection.
+		 */
+		INPUT_FORBIDDEN;
+	}
+}
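
For illustration, a minimal source-style processor using the new annotation could look like the sketch below; the class name and relationship are hypothetical and not part of this patch:

    import java.util.Collections;
    import java.util.Set;

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;
    import org.apache.nifi.processor.exception.ProcessException;

    // Hypothetical source processor: it creates its own FlowFiles, so it declares
    // INPUT_FORBIDDEN and the framework/UI will reject incoming connections.
    @InputRequirement(Requirement.INPUT_FORBIDDEN)
    public class ExampleSourceProcessor extends AbstractProcessor {

        static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Newly created FlowFiles are routed here")
            .build();

        @Override
        public Set<Relationship> getRelationships() {
            return Collections.singleton(REL_SUCCESS);
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            // A source processor creates FlowFiles rather than pulling them from an input queue.
            session.transfer(session.create(), REL_SUCCESS);
        }
    }

Because the annotation is declared @Inherited, subclasses of such a processor pick up the same requirement unless they declare their own.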

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-docs/src/main/asciidoc/developer-guide.adoc
----------------------------------------------------------------------
diff --git a/nifi-docs/src/main/asciidoc/developer-guide.adoc b/nifi-docs/src/main/asciidoc/developer-guide.adoc
index f9950d5..28df5c2 100644
--- a/nifi-docs/src/main/asciidoc/developer-guide.adoc
+++ b/nifi-docs/src/main/asciidoc/developer-guide.adoc
@@ -1633,6 +1633,17 @@ will handle your Processor:
 		not there is any data on an input queue. This is useful, for example, if the Processor needs to be triggered to run
 		periodically to time out a network connection.
 
+    - `InputRequirement`: By default, all Processors will allow users to create incoming connections for the Processor, but
+        if the user does not create an incoming connection, the Processor is still valid and can be scheduled to run. For Processors
+        that are expected to be used as a "Source Processor," though, this can be confusing to the user, and the user may attempt to
+        send FlowFiles to that Processor, only for the FlowFiles to queue up without being processed. Conversely, if the Processor
+        expects incoming FlowFiles but does not have an input queue, the Processor will be scheduled to run but will perform no work,
+        as it will receive no FlowFile, and this leads to confusion as well. As a result, we can use the `@InputRequirement` annotation
+        and provide it a value of `INPUT_REQUIRED`, `INPUT_ALLOWED`, or `INPUT_FORBIDDEN`. This provides information to the framework
+        about when the Processor should be made invalid, or whether or not the user should even be able to draw a Connection to the
+        Processor. For instance, if a Processor is annotated with `InputRequirement(Requirement.INPUT_FORBIDDEN)`, then the user will
+        not even be able to create a Connection with that Processor as the destination.
+
 
 === Data Buffering
 

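Conversely, a processor that consumes FlowFiles would declare INPUT_REQUIRED, tying it to the validation added in StandardProcessorNode above; this is a sketch under the same assumptions (hypothetical class name):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.exception.ProcessException;

    // Hypothetical consuming processor: with INPUT_REQUIRED, the framework marks it
    // invalid until at least one incoming Connection exists, so it is never scheduled
    // with nothing to pull from.
    @InputRequirement(Requirement.INPUT_REQUIRED)
    public class ExampleConsumingProcessor extends AbstractProcessor {

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            final FlowFile flowFile = session.get();
            if (flowFile == null) {
                return; // nothing queued on this trigger
            }
            // Processing and relationships are omitted for brevity; drop the FlowFile.
            session.remove(flowFile);
        }
    }
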
http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
index 8832a73..b214427 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ConvertAvroToJSON.java
@@ -29,6 +29,8 @@ import org.apache.avro.file.DataFileStream;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericRecord;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -46,6 +48,7 @@ import org.apache.nifi.processor.io.StreamCallback;
 @SideEffectFree
 @SupportsBatching
 @Tags({ "json", "avro", "binary" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Converts a Binary Avro record into a JSON object. This processor provides a direct mapping of an Avro field to a JSON field, such "
     + "that the resulting JSON will have the same hierarchical structure as the Avro document. Note that the Avro schema information will be lost, as this "
     + "is not a translation from binary Avro to JSON formatted Avro. The output JSON is encoded the UTF-8 encoding. If an incoming FlowFile contains a stream of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
index 48aad7d..4cf5289 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java
@@ -16,6 +16,19 @@
  */
 package org.apache.nifi.processors.avro;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
 import org.apache.avro.Schema;
 import org.apache.avro.SchemaNormalization;
 import org.apache.avro.file.DataFileStream;
@@ -23,6 +36,8 @@ import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -41,22 +56,10 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.processor.util.StandardValidators;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-
 @SideEffectFree
 @SupportsBatching
 @Tags({ "avro", "schema", "metadata" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Extracts metadata from the header of an Avro datafile.")
 @WritesAttributes({
         @WritesAttribute(attribute = "schema.type", description = "The type of the schema (i.e. record, enum, etc.)."),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
index 3b344b5..dbf5778 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
@@ -16,6 +16,18 @@
  */
 package org.apache.nifi.processors.avro;
 
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileConstants;
 import org.apache.avro.file.DataFileStream;
@@ -26,6 +38,8 @@ import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.Encoder;
 import org.apache.avro.io.EncoderFactory;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -45,21 +59,10 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.stream.io.BufferedOutputStream;
 import org.apache.nifi.util.ObjectHolder;
 
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 @SideEffectFree
 @SupportsBatching
 @Tags({ "avro", "split" })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Splits a binary encoded Avro datafile into smaller files based on the configured Output Size. The Output Strategy determines if " +
         "the smaller files will be Avro datafiles, or bare Avro records with metadata in the FlowFile attributes. The output will always be binary encoded.")
 public class SplitAvro extends AbstractProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
index 2406b67..131e671 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/FetchS3Object.java
@@ -24,6 +24,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -45,6 +47,7 @@ import com.amazonaws.services.s3.model.S3Object;
 
 @SupportsBatching
 @SeeAlso({PutS3Object.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "S3", "AWS", "Get", "Fetch"})
 @CapabilityDescription("Retrieves the contents of an S3 Object and writes it to the content of a FlowFile")
 @WritesAttributes({

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
index 24c82dd..7398c4e 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/PutS3Object.java
@@ -28,6 +28,8 @@ import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -54,6 +56,7 @@ import com.amazonaws.services.s3.model.StorageClass;
 
 @SupportsBatching
 @SeeAlso({FetchS3Object.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "S3", "AWS", "Archive", "Put"})
 @CapabilityDescription("Puts FlowFiles to an Amazon S3 Bucket")
 @DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
@@ -101,7 +104,8 @@ public class PutS3Object extends AbstractS3Processor {
                 .build();
     }
 
-    public void onTrigger(final ProcessContext context, final ProcessSession session) {
+    @Override
+	public void onTrigger(final ProcessContext context, final ProcessSession session) {
         FlowFile flowFile = session.get();
         if (flowFile == null) {
             return;

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
index 7d42703..e571ff4 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sns/PutSNS.java
@@ -23,6 +23,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -41,6 +43,7 @@ import com.amazonaws.services.sns.model.PublishRequest;
 
 @SupportsBatching
 @SeeAlso({GetSQS.class, PutSQS.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"amazon", "aws", "sns", "topic", "put", "publish", "pubsub"})
 @CapabilityDescription("Sends the content of a FlowFile as a notification to the Amazon Simple Notification Service")
 public class PutSNS extends AbstractSNSProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
index 65e020d..f88aa71 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/DeleteSQS.java
@@ -21,6 +21,8 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -37,6 +39,7 @@ import com.amazonaws.services.sqs.model.DeleteMessageBatchRequestEntry;
 
 @SupportsBatching
 @SeeAlso({GetSQS.class, PutSQS.class})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Delete"})
 @CapabilityDescription("Deletes a message from an Amazon Simple Queuing Service Queue")
 public class DeleteSQS extends AbstractSQSProcessor {

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
index 7c2dd2d..a140999 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/GetSQS.java
@@ -28,6 +28,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.behavior.WritesAttribute;
 import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -51,8 +53,9 @@ import com.amazonaws.services.sqs.model.ReceiveMessageRequest;
 import com.amazonaws.services.sqs.model.ReceiveMessageResult;
 
 @SupportsBatching
+@SeeAlso({ PutSQS.class, DeleteSQS.class })
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Get", "Fetch", "Poll"})
-@SeeAlso({PutSQS.class, DeleteSQS.class})
 @CapabilityDescription("Fetches messages from an Amazon Simple Queuing Service Queue")
 @WritesAttributes({
     @WritesAttribute(attribute = "hash.value", description = "The MD5 sum of the message"),

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
index 3961f32..0af508e 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/sqs/PutSQS.java
@@ -28,6 +28,8 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.nifi.annotation.behavior.DynamicProperty;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
@@ -45,8 +47,9 @@ import com.amazonaws.services.sqs.model.SendMessageBatchRequest;
 import com.amazonaws.services.sqs.model.SendMessageBatchRequestEntry;
 
 @SupportsBatching
+@SeeAlso({ GetSQS.class, DeleteSQS.class })
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"Amazon", "AWS", "SQS", "Queue", "Put", "Publish"})
-@SeeAlso({GetSQS.class, DeleteSQS.class})
 @CapabilityDescription("Publishes a message to an Amazon Simple Queuing Service Queue")
 @DynamicProperty(name = "The name of a Message Attribute to add to the message", value = "The value of the Message Attribute",
         description = "Allows the user to add key/value pairs as Message Attributes by adding a property whose name will become the name of "

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
index 57e0278..f93b215 100644
--- a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
+++ b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSink.java
@@ -16,20 +16,19 @@
  */
 package org.apache.nifi.processors.flume;
 
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.util.List;
 import java.util.Set;
+
 import org.apache.flume.EventDeliveryException;
 import org.apache.flume.Sink;
 import org.apache.flume.conf.Configurables;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
-
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.processor.ProcessContext;
@@ -40,12 +39,17 @@ import org.apache.nifi.processor.SchedulingContext;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 /**
  * This processor runs a Flume sink
  */
+@TriggerSerially
 @Tags({"flume", "hadoop", "put", "sink"})
+@InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("Execute a Flume sink. Each input FlowFile is converted into a Flume Event for processing by the sink.")
-@TriggerSerially
 public class ExecuteFlumeSink extends AbstractFlumeProcessor {
 
     public static final PropertyDescriptor SINK_TYPE = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
index 600f4b1..3aad6b7 100644
--- a/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
+++ b/nifi-nar-bundles/nifi-flume-bundle/nifi-flume-processors/src/main/java/org/apache/nifi/processors/flume/ExecuteFlumeSource.java
@@ -16,12 +16,10 @@
  */
 package org.apache.nifi.processors.flume;
 
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.flume.EventDeliveryException;
 import org.apache.flume.EventDrivenSource;
 import org.apache.flume.PollableSource;
@@ -29,12 +27,13 @@ import org.apache.flume.Source;
 import org.apache.flume.channel.ChannelProcessor;
 import org.apache.flume.conf.Configurables;
 import org.apache.flume.source.EventDrivenSourceRunner;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.TriggerSerially;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
-
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.Validator;
 import org.apache.nifi.processor.ProcessContext;
@@ -46,12 +45,17 @@ import org.apache.nifi.processor.SchedulingContext;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
 /**
  * This processor runs a Flume source
  */
+@TriggerSerially
 @Tags({"flume", "hadoop", "get", "source"})
+@InputRequirement(Requirement.INPUT_FORBIDDEN)
 @CapabilityDescription("Execute a Flume source. Each Flume Event is sent to the success relationship as a FlowFile")
-@TriggerSerially
 public class ExecuteFlumeSource extends AbstractFlumeProcessor {
 
     public static final PropertyDescriptor SOURCE_TYPE = new PropertyDescriptor.Builder()

http://git-wip-us.apache.org/repos/asf/nifi/blob/4afd8f88/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
index f2a83d0..2f72d0f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/ProcessorNode.java
@@ -20,6 +20,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.connectable.Connectable;
 import org.apache.nifi.controller.service.ControllerServiceNode;
 import org.apache.nifi.controller.service.ControllerServiceProvider;
@@ -30,70 +31,72 @@ import org.apache.nifi.scheduling.SchedulingStrategy;
 
 public abstract class ProcessorNode extends AbstractConfiguredComponent implements Connectable {
 
-    public ProcessorNode(final Processor processor, final String id,
-            final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
-        super(processor, id, validationContextFactory, serviceProvider);
-    }
+	public ProcessorNode(final Processor processor, final String id,
+		final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider) {
+		super(processor, id, validationContextFactory, serviceProvider);
+	}
 
-    public abstract boolean isIsolated();
+	public abstract boolean isIsolated();
 
-    public abstract boolean isTriggerWhenAnyDestinationAvailable();
+	public abstract boolean isTriggerWhenAnyDestinationAvailable();
 
-    @Override
-    public abstract boolean isSideEffectFree();
+	@Override
+	public abstract boolean isSideEffectFree();
 
-    public abstract boolean isTriggeredSerially();
+	public abstract boolean isTriggeredSerially();
 
-    public abstract boolean isEventDrivenSupported();
+	public abstract boolean isEventDrivenSupported();
 
-    public abstract boolean isHighThroughputSupported();
+	public abstract boolean isHighThroughputSupported();
 
-    @Override
-    public abstract boolean isValid();
+	public abstract Requirement getInputRequirement();
 
-    public abstract void setScheduledState(ScheduledState scheduledState);
+	@Override
+	public abstract boolean isValid();
 
-    public abstract void setBulletinLevel(LogLevel bulletinLevel);
+	public abstract void setScheduledState(ScheduledState scheduledState);
 
-    public abstract LogLevel getBulletinLevel();
+	public abstract void setBulletinLevel(LogLevel bulletinLevel);
 
-    public abstract Processor getProcessor();
+	public abstract LogLevel getBulletinLevel();
 
-    public abstract void yield(long period, TimeUnit timeUnit);
+	public abstract Processor getProcessor();
 
-    public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
+	public abstract void yield(long period, TimeUnit timeUnit);
 
-    public abstract Set<Relationship> getAutoTerminatedRelationships();
+	public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
 
-    public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
+	public abstract Set<Relationship> getAutoTerminatedRelationships();
 
-    @Override
-    public abstract SchedulingStrategy getSchedulingStrategy();
+	public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
 
-    public abstract void setRunDuration(long duration, TimeUnit timeUnit);
+	@Override
+	public abstract SchedulingStrategy getSchedulingStrategy();
 
-    public abstract long getRunDuration(TimeUnit timeUnit);
+	public abstract void setRunDuration(long duration, TimeUnit timeUnit);
 
-    public abstract Map<String, String> getStyle();
+	public abstract long getRunDuration(TimeUnit timeUnit);
 
-    public abstract void setStyle(Map<String, String> style);
+	public abstract Map<String, String> getStyle();
 
-    /**
-     * @return the number of threads (concurrent tasks) currently being used by
-     * this Processor
-     */
-    public abstract int getActiveThreadCount();
+	public abstract void setStyle(Map<String, String> style);
 
-    /**
-     * Verifies that this Processor can be started if the provided set of
-     * services are enabled. This is introduced because we need to verify that
-     * all components can be started before starting any of them. In order to do
-     * that, we need to know that this component can be started if the given
-     * services are enabled, as we will then enable the given services before
-     * starting this component.
-     *
-     * @param ignoredReferences to ignore
-     */
-    public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
+	/**
+	 * @return the number of threads (concurrent tasks) currently being used by
+	 * this Processor
+	 */
+	public abstract int getActiveThreadCount();
+
+	/**
+	 * Verifies that this Processor can be started if the provided set of
+	 * services are enabled. This is introduced because we need to verify that
+	 * all components can be started before starting any of them. In order to do
+	 * that, we need to know that this component can be started if the given
+	 * services are enabled, as we will then enable the given services before
+	 * starting this component.
+	 *
+	 * @param ignoredReferences to ignore
+	 */
+	public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
 
 }


[17/17] nifi git commit: NIFI-810: Reworded validation errors pertaining to upstream connections

Posted by ma...@apache.org.
NIFI-810: Reworded validation errors pertaining to upstream connections


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/8e2308b7
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/8e2308b7
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/8e2308b7

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 8e2308b78de480dd7848ffe8efb485a5ee61c42a
Parents: ccfb57f
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Oct 7 17:53:04 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Oct 7 17:53:04 2015 -0400

----------------------------------------------------------------------
 .../org/apache/nifi/controller/StandardProcessorNode.java    | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/8e2308b7/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
index ad22c6d..2b0d413 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardProcessorNode.java
@@ -1048,8 +1048,8 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
                     final int incomingConnCount = getIncomingConnections().size();
                     if (incomingConnCount != 0) {
                         results.add(new ValidationResult.Builder()
-                            .explanation("Processor does not accept Incoming Connections but is currently configured with " + incomingConnCount + " Incoming Connections")
-                            .subject("Incoming Connections")
+                            .explanation("Processor does not allow upstream connections but currently has " + incomingConnCount)
+                            .subject("Upstream Connections")
                             .valid(false)
                             .build());
                     }
@@ -1058,8 +1058,8 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
                 case INPUT_REQUIRED: {
                     if (getIncomingConnections().isEmpty()) {
                         results.add(new ValidationResult.Builder()
-                            .explanation("Processor required at least one Incoming Connection in order to perform its function but currently has no Incoming Connection")
-                            .subject("Incoming Connections")
+                            .explanation("Processor requires an upstream connection but currently has none")
+                            .subject("Upstream Connections")
                             .valid(false)
                             .build());
                     }
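
For reference, the validation messages above are driven entirely by the
processor's @InputRequirement annotation. The following is a minimal,
hypothetical sketch (the PassThrough class is illustrative only and is not
part of this commit):

    import org.apache.nifi.annotation.behavior.InputRequirement;
    import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;

    // Declaring INPUT_REQUIRED is all it takes for the framework to report
    // "Processor requires an upstream connection but currently has none"
    // when this processor is started without an incoming connection.
    @InputRequirement(Requirement.INPUT_REQUIRED)
    public class PassThrough extends AbstractProcessor {

        static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Pass-through output.")
            .build();

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) {
            final FlowFile flowFile = session.get();
            if (flowFile == null) {
                return; // nothing to do until an upstream FlowFile arrives
            }
            session.transfer(flowFile, REL_SUCCESS);
        }
    }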


[02/17] nifi git commit: nifi-992 Improvements based on code review.

Posted by ma...@apache.org.
nifi-992 Improvements based on code review.

- Removed checkstyle and contrib-check profile since they are inherited from
  the top-level pom.
- Consolidated DOC_ID and DOC_ID_EXP into a single DOC_ID property (see the
  sketch after the change summary below).
- Added a capability description to GetCouchbaseKey.
- Fixed documentation spelling mistakes.
- Handled exceptions appropriately.
- Added a 'retry' relationship.

Signed-off-by: Bryan Bende <bb...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/72eb64e8
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/72eb64e8
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/72eb64e8

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 72eb64e8a43a08a5be988a3826b0a116c57915ea
Parents: 2466a24
Author: ijokarumawak <ij...@gmail.com>
Authored: Wed Sep 30 00:58:39 2015 +0900
Committer: Bryan Bende <bb...@apache.org>
Committed: Tue Sep 29 13:48:13 2015 -0400

----------------------------------------------------------------------
 .../nifi-couchbase-processors/pom.xml           | 144 ----------
 .../nifi/couchbase/CouchbaseAttributes.java     |   4 +
 .../couchbase/AbstractCouchbaseProcessor.java   |  94 ++++---
 .../couchbase/CouchbaseExceptionMappings.java   | 128 +++++++++
 .../couchbase/ErrorHandlingStrategy.java        |  59 ++++
 .../processors/couchbase/GetCouchbaseKey.java   |  45 ++-
 .../processors/couchbase/PutCouchbaseKey.java   |  45 +--
 .../couchbase/TestCouchbaseClusterService.java  |   2 +-
 .../couchbase/TestGetCouchbaseKey.java          | 282 +++++++++++++++++--
 .../couchbase/TestPutCouchbaseKey.java          |  95 +++++--
 10 files changed, 626 insertions(+), 272 deletions(-)
----------------------------------------------------------------------
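
As a hedged illustration of the consolidated 'Document Id' property (a
hypothetical test snippet assuming the nifi-mock TestRunner; the DocIdSketch
class is not part of this commit), the single property now accepts either a
fixed id or an Expression Language expression:

    import org.apache.nifi.processors.couchbase.GetCouchbaseKey;
    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;

    public class DocIdSketch {
        public static void main(String[] args) {
            TestRunner runner = TestRunners.newTestRunner(GetCouchbaseKey.class);
            // a static, fixed document id...
            runner.setProperty("Document Id", "user-12345");
            // ...or an Expression Language expression evaluated per FlowFile
            runner.setProperty("Document Id", "doc-${filename}");
        }
    }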


http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
index 33b0baa..257ef46 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
@@ -61,148 +61,4 @@
             <scope>test</scope>
 		</dependency>
     </dependencies>
-	<build>
-		<pluginManagement>
-			<plugins>
-				<plugin>
-					<groupId>org.apache.maven.plugins</groupId>
-					<artifactId>maven-checkstyle-plugin</artifactId>
-					<version>2.15</version>
-					<dependencies>
-						<dependency>
-							<groupId>com.puppycrawl.tools</groupId>
-							<artifactId>checkstyle</artifactId>
-							<version>6.5</version>
-						</dependency>
-					</dependencies>
-				</plugin>
-			</plugins>
-		</pluginManagement>
-		<plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-checkstyle-plugin</artifactId>
-                <configuration>
-                    <checkstyleRules>
-                        <module name="Checker">
-                            <property name="charset" value="UTF-8" />
-                            <property name="severity" value="warning" />
-                            <!-- Checks for whitespace -->
-                            <!-- See http://checkstyle.sf.net/config_whitespace.html -->
-                            <module name="FileTabCharacter">
-                                <property name="eachLine" value="true" />
-                            </module>
-                            <module name="TreeWalker">
-                                <module name="RegexpSinglelineJava">
-                                    <property name="format" value="\s+$" />
-                                    <property name="message" value="Line has trailing whitespace." />
-                                </module>
-                                <module name="RegexpSinglelineJava">
-                                    <property name="format" value="[@]see\s+[{][@]link" />
-                                    <property name="message" value="Javadoc @see does not need @link: pick one or the other." />
-                                </module>
-                                <module name="OuterTypeFilename" />
-                                <module name="LineLength">
-                                    <!-- needs extra, because Eclipse formatter ignores the ending left
-                                        brace -->
-                                    <property name="max" value="200" />
-                                    <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://" />
-                                </module>
-                                <module name="AvoidStarImport" />
-                                <module name="UnusedImports">
-                                    <property name="processJavadoc" value="true" />
-                                </module>
-                                 <module name="NoLineWrap" />
-                                <module name="LeftCurly">
-                                    <property name="maxLineLength" value="160" />
-                                </module>
-                                <module name="RightCurly" />
-                                <module name="RightCurly">
-                                    <property name="option" value="alone" />
-                                    <property name="tokens" value="CLASS_DEF, METHOD_DEF, CTOR_DEF, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO, STATIC_INIT, INSTANCE_INIT" />
-                                </module>
-                                <module name="SeparatorWrap">
-                                    <property name="tokens" value="DOT" />
-                                    <property name="option" value="nl" />
-                                </module>
-                                <module name="SeparatorWrap">
-                                    <property name="tokens" value="COMMA" />
-                                    <property name="option" value="EOL" />
-                                </module>
-                                <module name="PackageName">
-                                    <property name="format" value="^[a-z]+(\.[a-z][a-zA-Z0-9]*)*$" />
-                                </module>
-                                <module name="MethodTypeParameterName">
-                                    <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)" />
-                                </module>
-                                <module name="MethodParamPad" />
-                                <module name="OperatorWrap">
-                                    <property name="option" value="NL" />
-                                    <property name="tokens" value="BAND, BOR, BSR, BXOR, DIV, EQUAL, GE, GT, LAND, LE, LITERAL_INSTANCEOF, LOR, LT, MINUS, MOD, NOT_EQUAL, QUESTION, SL, SR, STAR " />
-                                </module>
-                                 <module name="AnnotationLocation">
-                                    <property name="tokens" value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF" />
-                                </module>
-                                <module name="AnnotationLocation">
-                                    <property name="tokens" value="VARIABLE_DEF" />
-                                    <property name="allowSamelineMultipleAnnotations" value="true" />
-                                </module>
-                                <module name="NonEmptyAtclauseDescription" />
-                                <module name="JavadocMethod">
-                                    <property name="allowMissingJavadoc" value="true" />
-                                    <property name="allowMissingParamTags" value="true" />
-                                    <property name="allowMissingThrowsTags" value="true" />
-                                    <property name="allowMissingReturnTag" value="true" />
-                                    <property name="allowedAnnotations" value="Override,Test,BeforeClass,AfterClass,Before,After" />
-                                    <property name="allowThrowsTagsForSubclasses" value="true" />
-                                </module>
-                                <module name="SingleLineJavadoc" />
-                             </module>
-                        </module>
-                    </checkstyleRules>
-                    <violationSeverity>warning</violationSeverity>
-                    <includeTestSourceDirectory>true</includeTestSourceDirectory>
-                </configuration>
-	 		</plugin>
- 		</plugins>
-	</build>
-    <profiles>
-        <profile>
-            <!-- Checks style and licensing requirements. This is a good idea to run
-                for contributions and for the release process. While it would be nice to
-                run always these plugins can considerably slow the build and have proven
-                to create unstable builds in our multi-module project and when building using
-                multiple threads. The stability issues seen with Checkstyle in multi-module
-                builds include false-positives and false negatives. -->
-            <id>contrib-check</id>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.apache.rat</groupId>
-                        <artifactId>apache-rat-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <goals>
-                                    <goal>check</goal>
-                                </goals>
-                                <phase>verify</phase>
-                            </execution>
-                        </executions>
-                    </plugin>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-checkstyle-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <id>check-style</id>
-                                <goals>
-                                    <goal>check</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java
index a4d69fc..3bef8c5 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/couchbase/CouchbaseAttributes.java
@@ -43,6 +43,10 @@ public enum CouchbaseAttributes implements FlowFileAttributeKey {
      * The expiration of a related document.
      */
     Expiry("couchbase.doc.expiry"),
+    /**
+     * The thrown CouchbaseException class.
+     */
+    Exception("couchbase.exception"),
     ;
 
     private final String key;

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
index d370728..066b1ca 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/AbstractCouchbaseProcessor.java
@@ -23,13 +23,19 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.couchbase.CouchbaseAttributes;
 import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.logging.ProcessorLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import com.couchbase.client.core.CouchbaseException;
 import com.couchbase.client.java.Bucket;
 
 /**
@@ -46,49 +52,45 @@ public abstract class AbstractCouchbaseProcessor extends AbstractProcessor {
             .build();
 
     public static final PropertyDescriptor DOC_ID = new PropertyDescriptor
-            .Builder().name("Static Document Id")
-            .description("A static, fixed Couchbase document id.")
-            .required(false)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .build();
-
-    public static final PropertyDescriptor DOC_ID_EXP = new PropertyDescriptor
-            .Builder().name("Document Id Expression")
-            .description("An expression to construct the Couchbase document id."
-                    + " If 'Static Document Id' is specified, then 'Static Document Id' is used.")
-            .required(false)
+            .Builder().name("Document Id")
+            .description("A static, fixed Couchbase document id."
+                    + "Or an expression to construct the Couchbase document id.")
             .expressionLanguageSupported(true)
-            .addValidator(StandardValidators.ATTRIBUTE_EXPRESSION_LANGUAGE_VALIDATOR)
+            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .build();
 
 
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("All FlowFiles that are written to Couchbase Server are routed to this relationship.")
-            .build();
+        .name("success")
+        .description("All FlowFiles that are written to Couchbase Server are routed to this relationship.")
+        .build();
     public static final Relationship REL_ORIGINAL = new Relationship.Builder()
-            .name("original")
-            .description("The original input file will be routed to this destination when it has been successfully processed.")
-            .build();
+        .name("original")
+        .description("The original input file will be routed to this destination when it has been successfully processed.")
+        .build();
+    public static final Relationship REL_RETRY = new Relationship.Builder()
+        .name("retry")
+        .description("All FlowFiles that cannot written to Couchbase Server but can be retried are routed to this relationship.")
+        .build();
     public static final Relationship REL_FAILURE = new Relationship.Builder()
-            .name("failure")
-            .description("All FlowFiles that cannot written to Couchbase Server are routed to this relationship.")
-            .build();
+        .name("failure")
+        .description("All FlowFiles that cannot written to Couchbase Server and can't be retried are routed to this relationship.")
+        .build();
 
     public static final PropertyDescriptor COUCHBASE_CLUSTER_SERVICE = new PropertyDescriptor
-            .Builder().name("Couchbase Cluster Controller Service")
-            .description("A Couchbase Cluster Controller Service which manages connections to a Couchbase cluster.")
-            .required(true)
-            .identifiesControllerService(CouchbaseClusterControllerService.class)
-            .build();
+        .Builder().name("Couchbase Cluster Controller Service")
+        .description("A Couchbase Cluster Controller Service which manages connections to a Couchbase cluster.")
+        .required(true)
+        .identifiesControllerService(CouchbaseClusterControllerService.class)
+        .build();
 
     public static final PropertyDescriptor BUCKET_NAME = new PropertyDescriptor
-            .Builder().name("Bucket Name")
-            .description("The name of bucket to access.")
-            .required(true)
-            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-            .defaultValue("default")
-            .build();
+        .Builder().name("Bucket Name")
+        .description("The name of bucket to access.")
+        .required(true)
+        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+        .defaultValue("default")
+        .build();
 
     private List<PropertyDescriptor> descriptors;
 
@@ -171,4 +173,32 @@ public abstract class AbstractCouchbaseProcessor extends AbstractProcessor {
             .toString();
     }
 
+    /**
+     * Handles the thrown CouchbaseException accordingly.
+     * @param session a process session
+     * @param logger a logger
+     * @param inFile an input FlowFile
+     * @param e the thrown CouchbaseException
+     * @param errMsg a message to be logged
+     */
+    protected void handleCouchbaseException(final ProcessSession session,
+            final ProcessorLog logger, FlowFile inFile, CouchbaseException e,
+            String errMsg) {
+        logger.error(errMsg, e);
+        if(inFile != null){
+            ErrorHandlingStrategy strategy = CouchbaseExceptionMappings.getStrategy(e);
+            switch(strategy.result()) {
+            case ProcessException:
+                throw new ProcessException(errMsg, e);
+            case Failure:
+                inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), e.getClass().getName());
+                session.transfer(inFile, REL_FAILURE);
+                break;
+            case Retry:
+                inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), e.getClass().getName());
+                session.transfer(inFile, REL_RETRY);
+                break;
+            }
+        }
+    }
 }
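
A hypothetical caller of handleCouchbaseException, sketched inside a subclass
of AbstractCouchbaseProcessor (the try body and error message are assumptions,
not part of this diff):

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }
        try {
            // ... perform the Couchbase get/put and transfer to REL_SUCCESS ...
        } catch (final CouchbaseException e) {
            // A single call maps the exception class to Failure, Retry,
            // or a rethrown ProcessException via the strategy table.
            handleCouchbaseException(session, getLogger(), flowFile, e,
                "Couchbase operation failed for " + flowFile);
        }
    }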

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/CouchbaseExceptionMappings.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/CouchbaseExceptionMappings.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/CouchbaseExceptionMappings.java
new file mode 100644
index 0000000..87ffabb
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/CouchbaseExceptionMappings.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.ConfigurationError;
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Fatal;
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.InvalidInput;
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.TemporalClusterError;
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.TemporalFlowFileError;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import com.couchbase.client.core.BackpressureException;
+import com.couchbase.client.core.BucketClosedException;
+import com.couchbase.client.core.CouchbaseException;
+import com.couchbase.client.core.DocumentConcurrentlyModifiedException;
+import com.couchbase.client.core.DocumentMutationLostException;
+import com.couchbase.client.core.ReplicaNotConfiguredException;
+import com.couchbase.client.core.RequestCancelledException;
+import com.couchbase.client.core.ServiceNotAvailableException;
+import com.couchbase.client.core.config.ConfigurationException;
+import com.couchbase.client.core.endpoint.SSLException;
+import com.couchbase.client.core.endpoint.kv.AuthenticationException;
+import com.couchbase.client.core.env.EnvironmentException;
+import com.couchbase.client.core.state.NotConnectedException;
+import com.couchbase.client.java.error.BucketDoesNotExistException;
+import com.couchbase.client.java.error.CannotRetryException;
+import com.couchbase.client.java.error.CouchbaseOutOfMemoryException;
+import com.couchbase.client.java.error.DurabilityException;
+import com.couchbase.client.java.error.InvalidPasswordException;
+import com.couchbase.client.java.error.RequestTooBigException;
+import com.couchbase.client.java.error.TemporaryFailureException;
+import com.couchbase.client.java.error.TranscodingException;
+
+public class CouchbaseExceptionMappings {
+
+    private static final Map<Class<? extends CouchbaseException>, ErrorHandlingStrategy> mapping = new HashMap<>();
+
+    /*
+     * - Won't happen
+     * BucketAlreadyExistsException: never create a bucket
+     * CASMismatchException: cas-id and replace is not used yet
+     * DesignDocumentException: View is not used yet
+     * DocumentAlreadyExistsException: insert is not used yet
+     * DocumentDoesNotExistException: replace is not used yet
+     * FlushDisabledException: never call flush
+     * RepositoryMappingException: EntityDocument is not used
+     * TemporaryLockFailureException: we don't obtain locks
+     * ViewDoesNotExistException: View is not used yet
+     * NamedPreparedStatementException: N1QL is not used yet
+     * QueryExecutionException: N1QL is not used yet
+     */
+    static {
+        /*
+         * ConfigurationError
+         */
+        mapping.put(AuthenticationException.class, ConfigurationError);
+        mapping.put(BucketDoesNotExistException.class, ConfigurationError);
+        mapping.put(ConfigurationException.class, ConfigurationError);
+        mapping.put(InvalidPasswordException.class, ConfigurationError);
+        mapping.put(EnvironmentException.class, ConfigurationError);
+        // when Couchbase doesn't have enough replicas
+        mapping.put(ReplicaNotConfiguredException.class, ConfigurationError);
+        // when a particular Service (KV, View, Query, DCP) isn't running in a cluster
+        mapping.put(ServiceNotAvailableException.class, ConfigurationError);
+        // SSL configuration error, such as a keystore misconfiguration.
+        mapping.put(SSLException.class, ConfigurationError);
+
+        /*
+         * InvalidInput
+         */
+        mapping.put(RequestTooBigException.class, InvalidInput);
+        mapping.put(TranscodingException.class, InvalidInput);
+
+        /*
+         * Temporal Cluster Error
+         */
+        mapping.put(BackpressureException.class, TemporalClusterError);
+        mapping.put(CouchbaseOutOfMemoryException.class, TemporalClusterError);
+        mapping.put(TemporaryFailureException.class, TemporalClusterError);
+        // occurs when a connection gets lost
+        mapping.put(RequestCancelledException.class, TemporalClusterError);
+
+        /*
+         * Temporal FlowFile Error
+         */
+        mapping.put(DocumentConcurrentlyModifiedException.class, TemporalFlowFileError);
+        mapping.put(DocumentMutationLostException.class, TemporalFlowFileError);
+        mapping.put(DurabilityException.class, TemporalFlowFileError);
+
+        /*
+         * Fatal
+         */
+        mapping.put(BucketClosedException.class, Fatal);
+        mapping.put(CannotRetryException.class, Fatal);
+        mapping.put(NotConnectedException.class, Fatal);
+    }
+
+    /**
+     * Returns a registered error handling strategy.
+     * @param e the CouchbaseException
+     * @return the registered strategy, or Fatal if the exception class is not registered
+     */
+    public static ErrorHandlingStrategy getStrategy(CouchbaseException e){
+        ErrorHandlingStrategy strategy = mapping.get(e.getClass());
+        if(strategy == null) {
+            // Treat unknown Exception as Fatal.
+            return ErrorHandlingStrategy.Fatal;
+        }
+        return strategy;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java
new file mode 100644
index 0000000..75b8f46
--- /dev/null
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/ErrorHandlingStrategy.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.couchbase;
+
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Penalty.Penalize;
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Penalty.Yield;
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Result.Failure;
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Result.ProcessException;
+import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Result.Retry;
+
+
+public enum ErrorHandlingStrategy {
+
+    ConfigurationError(ProcessException, Yield),
+    InvalidInput(Failure, Penalize),
+    TemporalClusterError(Retry, Yield),
+    TemporalFlowFileError(Retry, Penalize),
+    Fatal(Failure, Yield);
+
+    private final Result result;
+    private final Penalty penalty;
+    private ErrorHandlingStrategy(Result result, Penalty penalty){
+        this.result = result;
+        this.penalty = penalty;
+    }
+
+    public enum Result {
+        ProcessException, Failure, Retry;
+    }
+
+    /**
+     * Indicates whether to yield the processor or penalize the FlowFile when transferring the input FlowFile.
+     */
+    public enum Penalty {
+        Yield, Penalize;
+    }
+
+    public Result result(){
+        return this.result;
+    }
+
+    public Penalty penalty(){
+        return this.penalty;
+    }
+}
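
One detail the handler shown earlier does not yet use is the Penalty half of
the strategy. A hedged sketch of how a caller might honor it (assumed usage,
not present in this commit; 'e', 'flowFile', 'session' and 'context' come
from the surrounding onTrigger):

    final ErrorHandlingStrategy strategy = CouchbaseExceptionMappings.getStrategy(e);
    if (strategy.penalty() == ErrorHandlingStrategy.Penalty.Penalize) {
        // back off only this FlowFile before routing it
        flowFile = session.penalize(flowFile);
    } else {
        // back off the whole processor for the configured yield duration
        context.yield();
    }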

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
index 6d9a476..8c15e29 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/GetCouchbaseKey.java
@@ -45,24 +45,27 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.stream.io.StreamUtils;
 
+import com.couchbase.client.core.CouchbaseException;
 import com.couchbase.client.java.Bucket;
 import com.couchbase.client.java.document.BinaryDocument;
 import com.couchbase.client.java.document.Document;
 import com.couchbase.client.java.document.RawJsonDocument;
+import com.couchbase.client.java.error.DocumentDoesNotExistException;
 
 @Tags({ "nosql", "couchbase", "database", "get" })
-@CapabilityDescription("Get a document from Couchbase Server via Key/Value access.")
+@CapabilityDescription("Get a document from Couchbase Server via Key/Value access. This processor can be triggered by an incoming FlowFile, or it can be scheduled on a timer")
 @SeeAlso({CouchbaseClusterControllerService.class})
 @ReadsAttributes({
-    @ReadsAttribute(attribute = "FlowFile content", description = "Used as a document id if none of 'Static Document Id' or 'Document Id Expression' is specified"),
-    @ReadsAttribute(attribute = "*", description = "Any attribute can be used as part of a document id by 'Document Id Excepression.")
+    @ReadsAttribute(attribute = "FlowFile content", description = "Used as a document id if 'Document Id' is not specified"),
+    @ReadsAttribute(attribute = "*", description = "Any attribute can be used as part of a document id by 'Document Id' expression.")
     })
 @WritesAttributes({
     @WritesAttribute(attribute="couchbase.cluster", description="Cluster where the document was retrieved from."),
     @WritesAttribute(attribute="couchbase.bucket", description="Bucket where the document was retrieved from."),
     @WritesAttribute(attribute="couchbase.doc.id", description="Id of the document."),
     @WritesAttribute(attribute="couchbase.doc.cas", description="CAS of the document."),
-    @WritesAttribute(attribute="couchbase.doc.expiry", description="Expiration of the document.")
+    @WritesAttribute(attribute="couchbase.doc.expiry", description="Expiration of the document."),
+    @WritesAttribute(attribute="couchbase.exception", description="If Couchbase related error occurs the CouchbaseException class name will be captured here.")
     })
 public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
 
@@ -70,13 +73,13 @@ public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
     protected void addSupportedProperties(List<PropertyDescriptor> descriptors) {
         descriptors.add(DOCUMENT_TYPE);
         descriptors.add(DOC_ID);
-        descriptors.add(DOC_ID_EXP);
     }
 
     @Override
     protected void addSupportedRelationships(Set<Relationship> relationships) {
         relationships.add(REL_SUCCESS);
         relationships.add(REL_ORIGINAL);
+        relationships.add(REL_RETRY);
         relationships.add(REL_FAILURE);
     }
 
@@ -86,15 +89,9 @@ public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
         FlowFile inFile = session.get();
 
         String docId = null;
-        if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
-            docId = context.getProperty(DOC_ID).getValue();
-        } else {
-            // Otherwise docId has to be extracted from inFile.
-            if ( inFile == null ) {
-                return;
-            }
-            if(!StringUtils.isEmpty(context.getProperty(DOC_ID_EXP).getValue())){
-                docId = context.getProperty(DOC_ID_EXP).evaluateAttributeExpressions(inFile).getValue();
+        try {
+            if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
+                docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(inFile).getValue();
             } else {
                 final byte[] content = new byte[(int) inFile.getSize()];
                 session.read(inFile, new InputStreamCallback() {
@@ -105,11 +102,14 @@ public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
                 });
                 docId = new String(content, StandardCharsets.UTF_8);
             }
+        } catch (Throwable t) {
+            throw new ProcessException("Please check 'Document Id' setting. Couldn't get document id from " + inFile);
         }
 
         if(StringUtils.isEmpty(docId)){
-            logger.error("Couldn't get document id from from {}", new Object[]{inFile});
-            session.transfer(inFile, REL_FAILURE);
+            if(inFile != null){
+                throw new ProcessException("Please check the 'Document Id' setting. Couldn't get a document id from " + inFile);
+            }
+            // Without a document id there is nothing to fetch, so stop here.
+            return;
         }
 
         try {
@@ -137,8 +137,9 @@ public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
             }
 
             if(doc == null) {
-                logger.info("Document {} was not found in {}", new Object[]{docId, getTransitUrl(context)});
+                logger.warn("Document {} was not found in {}", new Object[]{docId, getTransitUrl(context)});
                 if(inFile != null){
+                    inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), DocumentDoesNotExistException.class.getName());
                     session.transfer(inFile, REL_FAILURE);
                 }
                 return;
@@ -160,13 +161,11 @@ public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
             session.getProvenanceReporter().receive(outFile, getTransitUrl(context));
             session.transfer(outFile, REL_SUCCESS);
 
-        } catch (Throwable t){
-            logger.error("Getting docuement {} from Couchbase Server using {} failed due to {}",
-                    new Object[]{docId, inFile, t}, t);
-            if(inFile != null){
-                session.transfer(inFile, REL_FAILURE);
-            }
+        } catch (CouchbaseException e){
+            String errMsg = String.format("Getting docuement %s from Couchbase Server using %s failed due to %s", docId, inFile, e);
+            handleCouchbaseException(session, logger, inFile, e, errMsg);
         }
     }
 
+
 }
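
The fallback branch in the hunk above derives the document id from the FlowFile content itself. Extracted here as a sketch against the NiFi 0.x API already imported in this file (InputStreamCallback, StreamUtils); this is the pattern, not the committed method verbatim.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.stream.io.StreamUtils;

class DocIdFromContentSketch {

    static String readDocId(ProcessSession session, FlowFile inFile) {
        // Buffer the entire FlowFile; document ids are expected to be small.
        final byte[] content = new byte[(int) inFile.getSize()];
        session.read(inFile, new InputStreamCallback() {
            @Override
            public void process(final InputStream in) throws IOException {
                StreamUtils.fillBuffer(in, content, true);
            }
        });
        return new String(content, StandardCharsets.UTF_8);
    }
}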

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
index 6bfa480..8f41383 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/main/java/org/apache/nifi/processors/couchbase/PutCouchbaseKey.java
@@ -45,6 +45,7 @@ import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.io.InputStreamCallback;
 import org.apache.nifi.stream.io.StreamUtils;
 
+import com.couchbase.client.core.CouchbaseException;
 import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
 import com.couchbase.client.deps.io.netty.buffer.Unpooled;
 import com.couchbase.client.java.PersistTo;
@@ -57,15 +58,16 @@ import com.couchbase.client.java.document.RawJsonDocument;
 @CapabilityDescription("Put a document to Couchbase Server via Key/Value access.")
 @SeeAlso({CouchbaseClusterControllerService.class})
 @ReadsAttributes({
-    @ReadsAttribute(attribute = "uuid", description = "Used as a document id if none of 'Static Document Id' or 'Document Id Expression' is specified"),
-    @ReadsAttribute(attribute = "*", description = "Any attribute can be used as part of a document id by 'Document Id Excepression.")
+    @ReadsAttribute(attribute = "uuid", description = "Used as a document id if 'Document Id' is not specified"),
+    @ReadsAttribute(attribute = "*", description = "Any attribute can be used as part of a document id by 'Document Id' expression.")
     })
 @WritesAttributes({
     @WritesAttribute(attribute="couchbase.cluster", description="Cluster where the document was stored."),
     @WritesAttribute(attribute="couchbase.bucket", description="Bucket where the document was stored."),
     @WritesAttribute(attribute="couchbase.doc.id", description="Id of the document."),
     @WritesAttribute(attribute="couchbase.doc.cas", description="CAS of the document."),
-    @WritesAttribute(attribute="couchbase.doc.expiry", description="Expiration of the document.")
+    @WritesAttribute(attribute="couchbase.doc.expiry", description="Expiration of the document."),
+    @WritesAttribute(attribute="couchbase.exception", description="If Couchbase related error occurs the CouchbaseException class name will be captured here.")
     })
 public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
 
@@ -90,7 +92,6 @@ public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
     protected void addSupportedProperties(List<PropertyDescriptor> descriptors) {
         descriptors.add(DOCUMENT_TYPE);
         descriptors.add(DOC_ID);
-        descriptors.add(DOC_ID_EXP);
         descriptors.add(PERSIST_TO);
         descriptors.add(REPLICATE_TO);
     }
@@ -109,24 +110,25 @@ public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
             return;
         }
 
-        try {
-
-            final byte[] content = new byte[(int) flowFile.getSize()];
-            session.read(flowFile, new InputStreamCallback() {
-                @Override
-                public void process(final InputStream in) throws IOException {
-                    StreamUtils.fillBuffer(in, content, true);
-                }
-            });
-
+        String docId = null;
+        final byte[] content = new byte[(int) flowFile.getSize()];
+        session.read(flowFile, new InputStreamCallback() {
+            @Override
+            public void process(final InputStream in) throws IOException {
+                StreamUtils.fillBuffer(in, content, true);
+            }
+        });
 
-            String docId = String.valueOf(flowFile.getAttribute(CoreAttributes.UUID.key()));
+        try {
+            docId = String.valueOf(flowFile.getAttribute(CoreAttributes.UUID.key()));
             if(!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())){
-                docId = context.getProperty(DOC_ID).getValue();
-            } else if(!StringUtils.isEmpty(context.getProperty(DOC_ID_EXP).getValue())){
-                docId = context.getProperty(DOC_ID_EXP).evaluateAttributeExpressions(flowFile).getValue();
+                docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
             }
+        } catch (Throwable t) {
+            throw new ProcessException("Please check 'Document Id' setting. Couldn't get document id from " + flowFile);
+        }
 
+        try {
             Document<?> doc = null;
             DocumentType documentType = DocumentType.valueOf(context.getProperty(DOCUMENT_TYPE).getValue());
             switch (documentType){
@@ -141,7 +143,6 @@ public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
                 }
             }
 
-
             PersistTo persistTo = PersistTo.valueOf(context.getProperty(PERSIST_TO).getValue());
             ReplicateTo replicateTo = ReplicateTo.valueOf(context.getProperty(REPLICATE_TO).getValue());
             doc = openBucket(context).upsert(doc, persistTo, replicateTo);
@@ -155,9 +156,9 @@ public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
             session.getProvenanceReporter().send(flowFile, getTransitUrl(context));
             session.transfer(flowFile, REL_SUCCESS);
 
-        } catch (Throwable t) {
-            logger.error("Writing {} into Couchbase Server failed due to {}", new Object[]{flowFile, t}, t);
-            session.transfer(flowFile, REL_FAILURE);
+        } catch (CouchbaseException e) {
+            String errMsg = String.format("Writing docuement %s to Couchbase Server using %s failed due to %s", docId, flowFile, e);
+            handleCouchbaseException(session, logger, flowFile, e, errMsg);
         }
     }
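
For context on the upsert call above: PersistTo/ReplicateTo control how long the SDK blocks for durability. A minimal sketch against the Couchbase Java SDK 2.x types used in this diff; the connection string and bucket name are placeholders.

import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.CouchbaseCluster;
import com.couchbase.client.java.PersistTo;
import com.couchbase.client.java.ReplicateTo;
import com.couchbase.client.java.document.RawJsonDocument;

class UpsertSketch {
    public static void main(String[] args) {
        Bucket bucket = CouchbaseCluster.create("couchbase://localhost").openBucket("default");
        RawJsonDocument doc = RawJsonDocument.create("doc-a", "{\"key\":\"value\"}");
        // NONE returns once the active node acknowledges the write; stricter
        // levels block until it is persisted to disk and/or replicated.
        bucket.upsert(doc, PersistTo.NONE, ReplicateTo.NONE);
    }
}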
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java
index d96b1c2..eb2220d 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestCouchbaseClusterService.java
@@ -45,7 +45,7 @@ public class TestCouchbaseClusterService {
 
     @Test
     public void testConnectionFailure() throws InitializationException {
-        String connectionString = "couchbase://invalid-hostname";
+        String connectionString = "invalid-protocol://invalid-hostname";
         CouchbaseClusterControllerService service = new CouchbaseClusterService();
         testRunner.addControllerService(SERVICE_ID, service);
         testRunner.setProperty(service, CouchbaseClusterService.CONNECTION_STRING, connectionString);

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
index 4ea4dff..dca2ae3 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestGetCouchbaseKey.java
@@ -16,14 +16,16 @@
  */
 package org.apache.nifi.processors.couchbase;
 
+import static org.apache.nifi.couchbase.CouchbaseAttributes.Exception;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.BUCKET_NAME;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.COUCHBASE_CLUSTER_SERVICE;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOCUMENT_TYPE;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID;
-import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID_EXP;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_FAILURE;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_ORIGINAL;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_RETRY;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_SUCCESS;
+import static org.junit.Assert.fail;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -34,19 +36,28 @@ import java.util.Map;
 
 import org.apache.nifi.couchbase.CouchbaseAttributes;
 import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
+import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.MockFlowFile;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import com.couchbase.client.core.BackpressureException;
+import com.couchbase.client.core.CouchbaseException;
 import com.couchbase.client.core.ServiceNotAvailableException;
+import com.couchbase.client.core.endpoint.kv.AuthenticationException;
+import com.couchbase.client.core.state.NotConnectedException;
 import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
 import com.couchbase.client.deps.io.netty.buffer.Unpooled;
 import com.couchbase.client.java.Bucket;
 import com.couchbase.client.java.document.BinaryDocument;
 import com.couchbase.client.java.document.RawJsonDocument;
+import com.couchbase.client.java.error.DocumentDoesNotExistException;
+import com.couchbase.client.java.error.DurabilityException;
+import com.couchbase.client.java.error.RequestTooBigException;
 
 
 public class TestGetCouchbaseKey {
@@ -92,6 +103,7 @@ public class TestGetCouchbaseKey {
 
         testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
         testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(content);
@@ -104,57 +116,110 @@ public class TestGetCouchbaseKey {
     }
 
 
-    /**
-     * Use static document id even if doc id expression is set.
-     */
     @Test
-    public void testStaticDocIdAndDocIdExp() throws Exception {
-        String docId = "doc-a";
-        String docIdExp = "${someProperty}";
+    public void testDocIdExp() throws Exception {
+        String docIdExp = "${'someProperty'}";
+        String somePropertyValue = "doc-p";
 
         Bucket bucket = mock(Bucket.class);
         String content = "{\"key\":\"value\"}";
-        when(bucket.get(docId, RawJsonDocument.class)).thenReturn(RawJsonDocument.create(docId, content));
+        when(bucket.get(somePropertyValue, RawJsonDocument.class))
+            .thenReturn(RawJsonDocument.create(somePropertyValue, content));
         setupMockBucket(bucket);
 
-        testRunner.setProperty(DOC_ID, docId);
-        testRunner.setProperty(DOC_ID_EXP, docIdExp);
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        byte[] inFileData = "input FlowFile data".getBytes(StandardCharsets.UTF_8);
+        Map<String, String> properties = new HashMap<>();
+        properties.put("someProperty", somePropertyValue);
+        testRunner.enqueue(inFileData, properties);
         testRunner.run();
 
-        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
         testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_ORIGINAL, 1);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(content);
     }
 
     @Test
-    public void testDocIdExp() throws Exception {
-        String docIdExp = "${'someProperty'}";
-        String somePropertyValue = "doc-p";
+    public void testDocIdExpWithNullFlowFile() throws Exception {
+        String docIdExp = "doc-s";
+        String docId = "doc-s";
 
         Bucket bucket = mock(Bucket.class);
         String content = "{\"key\":\"value\"}";
-        when(bucket.get(somePropertyValue, RawJsonDocument.class))
-            .thenReturn(RawJsonDocument.create(somePropertyValue, content));
+        when(bucket.get(docId, RawJsonDocument.class))
+            .thenReturn(RawJsonDocument.create(docId, content));
         setupMockBucket(bucket);
 
-        testRunner.setProperty(DOC_ID_EXP, docIdExp);
+        testRunner.setProperty(DOC_ID, docIdExp);
 
-        byte[] inFileData = "input FlowFile data".getBytes(StandardCharsets.UTF_8);
-        Map<String, String> properties = new HashMap<>();
-        properties.put("someProperty", somePropertyValue);
-        testRunner.enqueue(inFileData, properties);
         testRunner.run();
 
         testRunner.assertTransferCount(REL_SUCCESS, 1);
-        testRunner.assertTransferCount(REL_ORIGINAL, 1);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(content);
     }
 
     @Test
+    public void testDocIdExpWithInvalidExpression() throws Exception {
+        String docIdExp = "${nonExistingFunction('doc-s')}";
+        String docId = "doc-s";
+
+        Bucket bucket = mock(Bucket.class);
+        String content = "{\"key\":\"value\"}";
+        when(bucket.get(docId, RawJsonDocument.class))
+            .thenReturn(RawJsonDocument.create(docId, content));
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        try {
+            testRunner.run();
+            fail("ProcessException should be throws.");
+        } catch (AssertionError e){
+            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+        }
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+    }
+
+    @Test
+    public void testDocIdExpWithInvalidExpressionOnFlowFile() throws Exception {
+        String docIdExp = "${nonExistingFunction(someProperty)}";
+
+        Bucket bucket = mock(Bucket.class);
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        String inputFileDataStr = "input FlowFile data";
+        byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
+        Map<String, String> properties = new HashMap<>();
+        properties.put("someProperty", "someValue");
+        testRunner.enqueue(inFileData, properties);
+        try {
+            testRunner.run();
+            fail("ProcessException should be throws.");
+        } catch (AssertionError e){
+            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+        }
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+    }
+
+    @Test
     public void testInputFlowFileContent() throws Exception {
 
         Bucket bucket = mock(Bucket.class);
@@ -171,9 +236,12 @@ public class TestGetCouchbaseKey {
 
         testRunner.assertTransferCount(REL_SUCCESS, 1);
         testRunner.assertTransferCount(REL_ORIGINAL, 1);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(content);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_ORIGINAL).get(0);
+        orgFile.assertContentEquals(inFileDataStr);
     }
 
     @Test
@@ -195,9 +263,12 @@ public class TestGetCouchbaseKey {
 
         testRunner.assertTransferCount(REL_SUCCESS, 1);
         testRunner.assertTransferCount(REL_ORIGINAL, 1);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(content);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_ORIGINAL).get(0);
+        orgFile.assertContentEquals(inFileDataStr);
     }
 
 
@@ -213,12 +284,175 @@ public class TestGetCouchbaseKey {
 
         byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
         testRunner.enqueue(inFileData);
+        try {
+            testRunner.run();
+            fail("ProcessException should be throws.");
+        } catch (AssertionError e){
+            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+        }
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+    }
+
+    @Test
+    public void testCouchbaseConfigurationError() throws Exception {
+        String docIdExp = "doc-c";
+
+        Bucket bucket = mock(Bucket.class);
+        when(bucket.get(docIdExp, RawJsonDocument.class))
+            .thenThrow(new AuthenticationException());
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        String inputFileDataStr = "input FlowFile data";
+        byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
+        try {
+            testRunner.run();
+            fail("ProcessException should be throws.");
+        } catch (AssertionError e){
+            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+            Assert.assertTrue(e.getCause().getCause().getClass().equals(AuthenticationException.class));
+        }
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+    }
+
+    @Test
+    public void testCouchbaseInvalidInputError() throws Exception {
+        String docIdExp = "doc-c";
+
+        Bucket bucket = mock(Bucket.class);
+        CouchbaseException exception = new RequestTooBigException();
+        when(bucket.get(docIdExp, RawJsonDocument.class))
+            .thenThrow(exception);
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        String inputFileDataStr = "input FlowFile data";
+        byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
+        testRunner.run();
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 1);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
+        orgFile.assertContentEquals(inputFileDataStr);
+        orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
+    }
+
+    @Test
+    public void testCouchbaseTempClusterError() throws Exception {
+        String docIdExp = "doc-c";
+
+        Bucket bucket = mock(Bucket.class);
+        CouchbaseException exception = new BackpressureException();
+        when(bucket.get(docIdExp, RawJsonDocument.class))
+            .thenThrow(exception);
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        String inputFileDataStr = "input FlowFile data";
+        byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
+        testRunner.run();
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
+        orgFile.assertContentEquals(inputFileDataStr);
+        orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
+    }
+
+
+    @Test
+    public void testCouchbaseTempFlowFileError() throws Exception {
+        String docIdExp = "doc-c";
+
+        Bucket bucket = mock(Bucket.class);
+        // There is currently no suitable CouchbaseException for a temporary FlowFile error.
+        CouchbaseException exception = new DurabilityException();
+        when(bucket.get(docIdExp, RawJsonDocument.class))
+            .thenThrow(exception);
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        String inputFileDataStr = "input FlowFile data";
+        byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
+        testRunner.run();
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
+        orgFile.assertContentEquals(inputFileDataStr);
+        orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
+    }
+
+    @Test
+    public void testCouchbaseFatalError() throws Exception {
+        String docIdExp = "doc-c";
+
+        Bucket bucket = mock(Bucket.class);
+        CouchbaseException exception = new NotConnectedException();
+        when(bucket.get(docIdExp, RawJsonDocument.class))
+            .thenThrow(exception);
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        String inputFileDataStr = "input FlowFile data";
+        byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
+        testRunner.run();
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 1);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
+        orgFile.assertContentEquals(inputFileDataStr);
+        orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
+    }
+
+    @Test
+    public void testDocumentNotFound() throws Exception {
+        String docIdExp = "doc-n";
+
+        Bucket bucket = mock(Bucket.class);
+        when(bucket.get(docIdExp, RawJsonDocument.class))
+            .thenReturn(null);
+        setupMockBucket(bucket);
+
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        String inputFileDataStr = "input FlowFile data";
+        byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
+        testRunner.enqueue(inFileData);
         testRunner.run();
 
         testRunner.assertTransferCount(REL_SUCCESS, 0);
         testRunner.assertTransferCount(REL_ORIGINAL, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 1);
-        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
-        outFile.assertContentEquals(inFileDataStr);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
+        orgFile.assertContentEquals(inputFileDataStr);
+        orgFile.assertAttributeEquals(Exception.key(), DocumentDoesNotExistException.class.getName());
     }
 }
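
The tests above repeat one Mockito pattern: stub Bucket.get() to return a document, return null, or throw a specific CouchbaseException subtype, then assert which relationship the FlowFile was routed to. Condensed here as a sketch; the controller-service wiring done by setupMockBucket is elided.

import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.document.RawJsonDocument;

class MockBucketSketch {

    static Bucket bucketReturning(String docId, String json) {
        Bucket bucket = mock(Bucket.class);
        // Happy path: the expected id resolves to a stored JSON document.
        when(bucket.get(docId, RawJsonDocument.class))
            .thenReturn(RawJsonDocument.create(docId, json));
        return bucket;
    }

    static Bucket bucketThrowing(RuntimeException e) {
        Bucket bucket = mock(Bucket.class);
        // Error path: every lookup raises the supplied exception, so the test
        // can assert the error-handling strategy's routing.
        when(bucket.get(anyString(), eq(RawJsonDocument.class))).thenThrow(e);
        return bucket;
    }
}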

http://git-wip-us.apache.org/repos/asf/nifi/blob/72eb64e8/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
index 3995528..0388e35 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/src/test/java/org/apache/nifi/processors/couchbase/TestPutCouchbaseKey.java
@@ -16,13 +16,15 @@
  */
 package org.apache.nifi.processors.couchbase;
 
+import static org.apache.nifi.couchbase.CouchbaseAttributes.Exception;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.BUCKET_NAME;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.COUCHBASE_CLUSTER_SERVICE;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID;
-import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID_EXP;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_FAILURE;
+import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_RETRY;
 import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_SUCCESS;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
@@ -38,14 +40,18 @@ import java.util.Map;
 import org.apache.nifi.couchbase.CouchbaseAttributes;
 import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.MockFlowFile;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
 
+import com.couchbase.client.core.CouchbaseException;
+import com.couchbase.client.core.ServiceNotAvailableException;
 import com.couchbase.client.java.Bucket;
 import com.couchbase.client.java.PersistTo;
 import com.couchbase.client.java.ReplicateTo;
@@ -102,6 +108,7 @@ public class TestPutCouchbaseKey {
 
         testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
         testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(inFileData);
@@ -134,44 +141,44 @@ public class TestPutCouchbaseKey {
 
         testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
         testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(inFileData);
     }
 
-    /**
-     * Use static document id even if doc id expression is set.
-     */
     @Test
-    public void testStaticDocIdAndDocIdExp() throws Exception {
-        String docId = "doc-a";
-        String docIdExp = "${someProperty}";
+    public void testDocIdExp() throws Exception {
+        String docIdExp = "${'someProperty'}";
+        String somePropertyValue = "doc-p";
 
         String inFileData = "{\"key\":\"value\"}";
         byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
 
         Bucket bucket = mock(Bucket.class);
         when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
-            .thenReturn(RawJsonDocument.create(docId, inFileData));
+            .thenReturn(RawJsonDocument.create(somePropertyValue, inFileData));
         setupMockBucket(bucket);
 
-        testRunner.enqueue(inFileDataBytes);
-        testRunner.setProperty(DOC_ID, docId);
-        testRunner.setProperty(DOC_ID_EXP, docIdExp);
+        testRunner.setProperty(DOC_ID, docIdExp);
+
+        Map<String, String> properties = new HashMap<>();
+        properties.put("someProperty", somePropertyValue);
+        testRunner.enqueue(inFileDataBytes, properties);
         testRunner.run();
 
         verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
 
-        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
         testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(inFileData);
     }
 
     @Test
-    public void testDocIdExp() throws Exception {
-        String docIdExp = "${'someProperty'}";
+    public void testInvalidDocIdExp() throws Exception {
+        String docIdExp = "${invalid_function(someProperty)}";
         String somePropertyValue = "doc-p";
 
         String inFileData = "{\"key\":\"value\"}";
@@ -182,19 +189,21 @@ public class TestPutCouchbaseKey {
             .thenReturn(RawJsonDocument.create(somePropertyValue, inFileData));
         setupMockBucket(bucket);
 
-        testRunner.setProperty(DOC_ID_EXP, docIdExp);
+        testRunner.setProperty(DOC_ID, docIdExp);
 
         Map<String, String> properties = new HashMap<>();
         properties.put("someProperty", somePropertyValue);
         testRunner.enqueue(inFileDataBytes, properties);
-        testRunner.run();
-
-        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
+        try {
+            testRunner.run();
+            fail("ProcessException should be throws.");
+        } catch (AssertionError e){
+            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+        }
 
-        testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
-        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
-        outFile.assertContentEquals(inFileData);
     }
 
     @Test
@@ -219,6 +228,7 @@ public class TestPutCouchbaseKey {
         assertEquals(uuid, capture.getValue().id());
 
         testRunner.assertTransferCount(REL_SUCCESS, 1);
+        testRunner.assertTransferCount(REL_RETRY, 0);
         testRunner.assertTransferCount(REL_FAILURE, 0);
         MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
         outFile.assertContentEquals(inFileData);
@@ -235,20 +245,53 @@ public class TestPutCouchbaseKey {
 
         Bucket bucket = mock(Bucket.class);
         when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE)))
-            .thenThrow(new DurabilityException());
+            .thenThrow(new ServiceNotAvailableException());
         setupMockBucket(bucket);
 
         testRunner.enqueue(inFileDataBytes);
         testRunner.setProperty(DOC_ID, docId);
         testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
-        testRunner.run();
+        try {
+            testRunner.run();
+            fail("ProcessException should be throws.");
+        } catch (AssertionError e){
+            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
+        }
 
         verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE));
 
         testRunner.assertAllFlowFilesTransferred(REL_FAILURE);
         testRunner.assertTransferCount(REL_SUCCESS, 0);
-        testRunner.assertTransferCount(REL_FAILURE, 1);
-        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
-        outFile.assertContentEquals(inFileData);
+        testRunner.assertTransferCount(REL_RETRY, 0);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+    }
+
+    @Test
+    public void testCouchbaseTempFlowFileError() throws Exception {
+
+        String docId = "doc-a";
+
+        String inFileData = "{\"key\":\"value\"}";
+        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
+
+        Bucket bucket = mock(Bucket.class);
+        CouchbaseException exception = new DurabilityException();
+        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE)))
+            .thenThrow(exception);
+        setupMockBucket(bucket);
+
+        testRunner.enqueue(inFileDataBytes);
+        testRunner.setProperty(DOC_ID, docId);
+        testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
+        testRunner.run();
+
+        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE));
+
+        testRunner.assertTransferCount(REL_SUCCESS, 0);
+        testRunner.assertTransferCount(REL_RETRY, 1);
+        testRunner.assertTransferCount(REL_FAILURE, 0);
+        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
+        orgFile.assertContentEquals(inFileData);
+        orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
     }
 }
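
Several tests above share the same assertion idiom for errors that surface as a ProcessException: TestRunner.run() rethrows it wrapped in an AssertionError, so the tests unwrap and check the cause. As a reusable sketch of that idiom:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.util.TestRunner;

class ProcessExceptionAssertSketch {

    static void assertRunThrowsProcessException(TestRunner runner) {
        try {
            runner.run();
            fail("Expected a ProcessException to be thrown.");
        } catch (AssertionError e) {
            // The TestRunner surfaces the processor's exception as the cause.
            assertEquals(ProcessException.class, e.getCause().getClass());
        }
    }
}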


[04/17] nifi git commit: Previous commits for NIFI-992 provide the functionality as specified in Github PR.

Posted by ma...@apache.org.
Previous commits for NIFI-992 provide the functionality as specified in Github PR.

This closes #96.


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/883333cb
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/883333cb
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/883333cb

Branch: refs/heads/NIFI-810-InputRequirement
Commit: 883333cb00083fc5b0ee169122d1a4b41ee9d524
Parents: 033a155
Author: Bryan Bende <bb...@apache.org>
Authored: Thu Oct 1 15:12:22 2015 -0400
Committer: Bryan Bende <bb...@apache.org>
Committed: Thu Oct 1 15:12:22 2015 -0400

----------------------------------------------------------------------

----------------------------------------------------------------------