You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@nifi.apache.org by yu-iskw <gi...@git.apache.org> on 2015/08/03 02:01:57 UTC

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

GitHub user yu-iskw opened a pull request:

    https://github.com/apache/nifi/pull/72

    [NIFI-774] Create a DeleteS3Object Processor

    [[NIFI-774] Create a DeleteS3Object Processor - ASF JIRA](https://issues.apache.org/jira/browse/NIFI-774)

You can merge this pull request into a Git repository by running:

    $ git pull https://github.com/yu-iskw/nifi NIFI-774

Alternatively you can review and apply these changes as the patch at:

    https://github.com/apache/nifi/pull/72.patch

To close this pull request, make a commit to your master/trunk branch
with (at least) the following in the commit message:

    This closes #72
    
----
commit d993e68e7c5421c489b13e1b59bcc6f032802fe4
Author: Yu ISHIKAWA <yu...@gmail.com>
Date:   2015-07-23T04:13:15Z

    [NIFI-774] Create a DeleteS3Object Processor

----


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36411756
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,109 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.List;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonServiceException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.DeleteObjectRequest;
    +import com.amazonaws.services.s3.model.DeleteVersionRequest;
    +import org.apache.nifi.annotation.behavior.SupportsBatching;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket. " +
    +        "And the FlowFiles are checked if exists or not before deleting.")
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        try {
    +            // Checks if the key exists or not
    +            // If there is no such a key, then throws a exception
    +            s3.getObjectMetadata(bucket, key);
    +
    +            // Deletes a key on Amazon S3
    +            if (versionId == null) {
    +                final DeleteObjectRequest r = new DeleteObjectRequest(bucket, key);
    +                s3.deleteObject(r);
    +            } else {
    +                final DeleteVersionRequest r = new DeleteVersionRequest(bucket, key, versionId);
    +                s3.deleteVersion(r);
    +            }
    +        } catch (final AmazonServiceException ase) {
    +            getLogger().error("Failed to delete S3 Object for {}; routing to failure", new Object[]{flowFile, ase});
    +            session.transfer(flowFile, REL_FAILURE);
    +            return;
    +        }
    +
    +        session.transfer(flowFile, REL_SUCCESS);
    +        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    +        getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis});
    +        session.getProvenanceReporter().receive(flowFile, "https://" + bucket + ".amazonaws.com/" + key, transferMillis);
    --- End diff --
    
    @markap14 whoops!  I was speaking to a previous version of the code that did a get/delete and passed the content forward.  Based on how the code is now, you are correct.  Just a delete.
    
    I agree, remove the provenance line!  Sorry for the confusion!


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36267548
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    --- End diff --
    
    I got it.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36269143
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    --- End diff --
    
    When I removed the method, I got an `IllegalStateException`.
    
    ```
    java.lang.AssertionError: java.lang.IllegalStateException: Attempting to Evaluate Expressions but PropertyDescriptor[Version] indicates that the Expression Language is not supported. If you realize that this is the case and do not want this error to occur, it can be disabled by calling TestRunner.setValidateExpressionUsage(false)
    ```


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on the pull request:

    https://github.com/apache/nifi/pull/72#issuecomment-127654425
  
    Yu,
        Thank you for the contribution!  I took a look at your processor and noticed a few things I didn't understand.  Perhaps you can elaborate on them.
    
    Dan


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by asfgit <gi...@git.apache.org>.
Github user asfgit closed the pull request at:

    https://github.com/apache/nifi/pull/72


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36270059
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    +        final GetObjectRequest request;
    +        if (versionId == null) {
    +            request = new GetObjectRequest(bucket, key);
    +        } else {
    +            request = new GetObjectRequest(bucket, key, versionId);
    +        }
    +
    +        final Map<String, String> attributes = new HashMap<>();
    +        try (final S3Object s3Object = s3.getObject(request)) {
    +            flowFile = session.importFrom(s3Object.getObjectContent(), flowFile);
    --- End diff --
    
    It seems that the `AmazonS3.deleteObject` method doesn't throw an error when it tries to delete an object that doesn't exist. As I expected, we should check whether the file exists before deleting it.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36267498
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    --- End diff --
    
    Alright.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36270771
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/test/java/org/apache/nifi/processors/aws/s3/TestDeleteS3Object.java ---
    @@ -0,0 +1,70 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.net.URISyntaxException;
    +import java.net.URL;
    +import java.nio.file.Paths;
    +import java.util.HashMap;
    +import java.util.Map;
    +
    +import org.apache.nifi.util.TestRunner;
    +import org.apache.nifi.util.TestRunners;
    +import org.junit.Before;
    +import org.junit.Ignore;
    +import org.junit.Test;
    +
    +@Ignore("For local testing only - interacts with S3 so the credentials file must be configured and all necessary buckets created")
    +public class TestDeleteS3Object {
    +
    +    private final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
    +
    +    // When you want to test this, you should create a bucket on Amazon S3 as follows.
    +    private final String TEST_REGION = "us-east-1";
    +    private final String TEST_BUCKET = "test-bucket-00000000-0000-0000-0000-1234567890123";
    +
    +    @Before
    --- End diff --
    
    Do you have any idea how to prepare the fixtures for this test?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36204722
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    +        final GetObjectRequest request;
    +        if (versionId == null) {
    +            request = new GetObjectRequest(bucket, key);
    +        } else {
    +            request = new GetObjectRequest(bucket, key, versionId);
    +        }
    +
    +        final Map<String, String> attributes = new HashMap<>();
    --- End diff --
    
    An entry is added to this map, but the map is never used again.  What is the purpose of this map?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by rahst12 <gi...@git.apache.org>.
Github user rahst12 commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36206754
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    +        final GetObjectRequest request;
    +        if (versionId == null) {
    +            request = new GetObjectRequest(bucket, key);
    +        } else {
    +            request = new GetObjectRequest(bucket, key, versionId);
    +        }
    +
    +        final Map<String, String> attributes = new HashMap<>();
    +        try (final S3Object s3Object = s3.getObject(request)) {
    +            flowFile = session.importFrom(s3Object.getObjectContent(), flowFile);
    +            attributes.put("s3.bucket", s3Object.getBucketName());
    +
    +            ObjectMetadata metadata = s3Object.getObjectMetadata();
    +            String objectVersionId = metadata.getVersionId();
    +            if (objectVersionId == null) {
    +                final DeleteObjectRequest r = new DeleteObjectRequest(bucket, key);
    +                s3.deleteObject(r);
    +            } else {
    +                final DeleteVersionRequest r = new DeleteVersionRequest(bucket, key, objectVersionId);
    +                s3.deleteVersion(r);
    +            }
    +        } catch (final IOException | AmazonClientException ioe) {
    +            getLogger().error("Failed to retrieve S3 Object for {}; routing to failure", new Object[]{flowFile, ioe});
    +            session.transfer(flowFile, REL_FAILURE);
    +            return;
    +        }
    +
    +        session.transfer(flowFile, REL_SUCCESS);
    +        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    +        getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis});
    +        session.getProvenanceReporter().receive(flowFile, "http://" + bucket + ".amazonaws.com/" + key, transferMillis);
    --- End diff --
    
    I believe the default is to hit the https endpoint vs the http one.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36205680
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    --- End diff --
    
    Why wouldn't this field be required?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36270440
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    --- End diff --
    
    Sorry, I'll delete it.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36416503
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,109 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.List;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonServiceException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.DeleteObjectRequest;
    +import com.amazonaws.services.s3.model.DeleteVersionRequest;
    +import org.apache.nifi.annotation.behavior.SupportsBatching;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket. " +
    +        "And the FlowFiles are checked if exists or not before deleting.")
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        try {
    +            // Checks if the key exists or not
    +            // If there is no such key, then an exception is thrown
    +            s3.getObjectMetadata(bucket, key);
    +
    +            // Deletes a key on Amazon S3
    +            if (versionId == null) {
    +                final DeleteObjectRequest r = new DeleteObjectRequest(bucket, key);
    +                s3.deleteObject(r);
    +            } else {
    +                final DeleteVersionRequest r = new DeleteVersionRequest(bucket, key, versionId);
    +                s3.deleteVersion(r);
    +            }
    +        } catch (final AmazonServiceException ase) {
    +            getLogger().error("Failed to delete S3 Object for {}; routing to failure", new Object[]{flowFile, ase});
    +            session.transfer(flowFile, REL_FAILURE);
    +            return;
    +        }
    +
    +        session.transfer(flowFile, REL_SUCCESS);
    +        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    +        getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis});
    +        session.getProvenanceReporter().receive(flowFile, "https://" + bucket + ".amazonaws.com/" + key, transferMillis);
    --- End diff --
    
    I got it. I have removed the line. Thanks!


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by markap14 <gi...@git.apache.org>.
Github user markap14 commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36411552
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,109 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.List;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonServiceException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.DeleteObjectRequest;
    +import com.amazonaws.services.s3.model.DeleteVersionRequest;
    +import org.apache.nifi.annotation.behavior.SupportsBatching;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket. " +
    +        "And the FlowFiles are checked if exists or not before deleting.")
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        try {
    +            // Checks if the key exists or not
    +            // If there is no such key, then an exception is thrown
    +            s3.getObjectMetadata(bucket, key);
    +
    +            // Deletes a key on Amazon S3
    +            if (versionId == null) {
    +                final DeleteObjectRequest r = new DeleteObjectRequest(bucket, key);
    +                s3.deleteObject(r);
    +            } else {
    +                final DeleteVersionRequest r = new DeleteVersionRequest(bucket, key, versionId);
    +                s3.deleteVersion(r);
    +            }
    +        } catch (final AmazonServiceException ase) {
    +            getLogger().error("Failed to delete S3 Object for {}; routing to failure", new Object[]{flowFile, ase});
    +            session.transfer(flowFile, REL_FAILURE);
    +            return;
    +        }
    +
    +        session.transfer(flowFile, REL_SUCCESS);
    +        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    +        getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis});
    +        session.getProvenanceReporter().receive(flowFile, "https://" + bucket + ".amazonaws.com/" + key, transferMillis);
    --- End diff --
    
    @danbress I am confused - this is telling AWS to delete the data, it is not receiving any data, no?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36203920
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    --- End diff --
    
    I don't see dynamic properties used by this processor; I would suggest removing this annotation.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36204173
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    --- End diff --
    
    I don't see dynamic properties being read by this processor; I would suggest removing this method override.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36203978
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    --- End diff --
    
    I don't see the 'filename' FlowFile attribute being read by this processor; I would suggest removing this annotation.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36204940
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    --- End diff --
    
    The 'ff' variable is declared but never used.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36205772
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    --- End diff --
    
    Shouldn't this list contain the VERSION_ID PropertyDescriptor?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on the pull request:

    https://github.com/apache/nifi/pull/72#issuecomment-130533859
  
    @danbress @markap14 could you review the update when you have time? Thanks!


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36267522
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    --- End diff --
    
    I got it.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on the pull request:

    https://github.com/apache/nifi/pull/72#issuecomment-128236732
  
    @danbress @rahst12 thank you for your feedback! I have addressed the following points:
    
    - Removed unnecessary annotations and variables
    - Modified how the test setup is prepared
    - Modified how to check whether an S3 object exists; if it does not exist, an exception is now thrown.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36206528
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/test/java/org/apache/nifi/processors/aws/s3/TestDeleteS3Object.java ---
    @@ -0,0 +1,70 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.net.URISyntaxException;
    +import java.net.URL;
    +import java.nio.file.Paths;
    +import java.util.HashMap;
    +import java.util.Map;
    +
    +import org.apache.nifi.util.TestRunner;
    +import org.apache.nifi.util.TestRunners;
    +import org.junit.Before;
    +import org.junit.Ignore;
    +import org.junit.Test;
    +
    +@Ignore("For local testing only - interacts with S3 so the credentials file must be configured and all necessary buckets created")
    +public class TestDeleteS3Object {
    +
    +    private final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
    +
    +    // When you want to test this, you should create a bucket on Amazon S3 as follows.
    +    private final String TEST_REGION = "us-east-1";
    +    private final String TEST_BUCKET = "test-bucket-00000000-0000-0000-0000-1234567890123";
    +
    +    @Before
    +    public void setUp() throws IOException, URISyntaxException {
    +        final TestRunner runner = TestRunners.newTestRunner(new PutS3Object());
    +        runner.setProperty(PutS3Object.REGION, "ap-northeast-1");
    +        runner.setProperty(PutS3Object.CREDENTAILS_FILE, CREDENTIALS_FILE);
    +        runner.setProperty(PutS3Object.BUCKET, TEST_BUCKET);
    +
    +        final Map<String, String> attrs = new HashMap<>();
    +        attrs.put("filename", "hello.txt");
    +        URL file = this.getClass().getClassLoader().getResource("hello.txt");
    +        runner.enqueue(Paths.get(file.toURI()), attrs);
    +        runner.run(1);
    +    }
    +
    +    @Test
    +    public void testSimpleDelete() throws IOException {
    +        DeleteS3Object deleter = new DeleteS3Object();
    +        final TestRunner runner = TestRunners.newTestRunner(deleter);
    +        runner.setProperty(DeleteS3Object.CREDENTAILS_FILE, CREDENTIALS_FILE);
    +        runner.setProperty(DeleteS3Object.REGION, TEST_REGION);
    +        runner.setProperty(DeleteS3Object.BUCKET, TEST_BUCKET);
    --- End diff --
    
    Doesn't the KEY need to be set, so you can specify which object to delete?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36380347
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/test/java/org/apache/nifi/processors/aws/s3/TestDeleteS3Object.java ---
    @@ -0,0 +1,70 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.net.URISyntaxException;
    +import java.net.URL;
    +import java.nio.file.Paths;
    +import java.util.HashMap;
    +import java.util.Map;
    +
    +import org.apache.nifi.util.TestRunner;
    +import org.apache.nifi.util.TestRunners;
    +import org.junit.Before;
    +import org.junit.Ignore;
    +import org.junit.Test;
    +
    +@Ignore("For local testing only - interacts with S3 so the credentials file must be configured and all necessary buckets created")
    +public class TestDeleteS3Object {
    +
    +    private final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
    +
    +    // When you want to test this, you should create a bucket on Amazon S3 as follows.
    +    private final String TEST_REGION = "us-east-1";
    +    private final String TEST_BUCKET = "test-bucket-00000000-0000-0000-0000-1234567890123";
    +
    +    @Before
    +    public void setUp() throws IOException, URISyntaxException {
    +        final TestRunner runner = TestRunners.newTestRunner(new PutS3Object());
    +        runner.setProperty(PutS3Object.REGION, "ap-northeast-1");
    +        runner.setProperty(PutS3Object.CREDENTAILS_FILE, CREDENTIALS_FILE);
    +        runner.setProperty(PutS3Object.BUCKET, TEST_BUCKET);
    +
    +        final Map<String, String> attrs = new HashMap<>();
    +        attrs.put("filename", "hello.txt");
    +        URL file = this.getClass().getClassLoader().getResource("hello.txt");
    +        runner.enqueue(Paths.get(file.toURI()), attrs);
    +        runner.run(1);
    +    }
    +
    +    @Test
    +    public void testSimpleDelete() throws IOException {
    +        DeleteS3Object deleter = new DeleteS3Object();
    +        final TestRunner runner = TestRunners.newTestRunner(deleter);
    +        runner.setProperty(DeleteS3Object.CREDENTAILS_FILE, CREDENTIALS_FILE);
    +        runner.setProperty(DeleteS3Object.REGION, TEST_REGION);
    +        runner.setProperty(DeleteS3Object.BUCKET, TEST_BUCKET);
    --- End diff --
    
    Yes, we need a key. Thanks!


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36380624
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/test/java/org/apache/nifi/processors/aws/s3/TestDeleteS3Object.java ---
    @@ -0,0 +1,70 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.net.URISyntaxException;
    +import java.net.URL;
    +import java.nio.file.Paths;
    +import java.util.HashMap;
    +import java.util.Map;
    +
    +import org.apache.nifi.util.TestRunner;
    +import org.apache.nifi.util.TestRunners;
    +import org.junit.Before;
    +import org.junit.Ignore;
    +import org.junit.Test;
    +
    +@Ignore("For local testing only - interacts with S3 so the credentials file must be configured and all necessary buckets created")
    +public class TestDeleteS3Object {
    +
    +    private final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
    +
    +    // When you want to test this, you should create a bucket on Amazon S3 as follows.
    +    private final String TEST_REGION = "us-east-1";
    +    private final String TEST_BUCKET = "test-bucket-00000000-0000-0000-0000-1234567890123";
    +
    +    @Before
    --- End diff --
    
    I think we should upload a test object to Amazon S3 before the delete test. It seems that the objects for the other S3 test suites are fixed. However, personally, I think we should set up the bucket and objects dynamically. 


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by markap14 <gi...@git.apache.org>.
Github user markap14 commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36409268
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,109 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.List;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonServiceException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.DeleteObjectRequest;
    +import com.amazonaws.services.s3.model.DeleteVersionRequest;
    +import org.apache.nifi.annotation.behavior.SupportsBatching;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket. " +
    +        "And the FlowFiles are checked if exists or not before deleting.")
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        try {
    +            // Checks if the key exists or not
    +            // If there is no such a key, then throws a exception
    +            s3.getObjectMetadata(bucket, key);
    +
    +            // Deletes a key on Amazon S3
    +            if (versionId == null) {
    +                final DeleteObjectRequest r = new DeleteObjectRequest(bucket, key);
    +                s3.deleteObject(r);
    +            } else {
    +                final DeleteVersionRequest r = new DeleteVersionRequest(bucket, key, versionId);
    +                s3.deleteVersion(r);
    +            }
    +        } catch (final AmazonServiceException ase) {
    +            getLogger().error("Failed to delete S3 Object for {}; routing to failure", new Object[]{flowFile, ase});
    +            session.transfer(flowFile, REL_FAILURE);
    +            return;
    +        }
    +
    +        session.transfer(flowFile, REL_SUCCESS);
    +        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    +        getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis});
    +        session.getProvenanceReporter().receive(flowFile, "https://" + bucket + ".amazonaws.com/" + key, transferMillis);
    --- End diff --
    
    I think this line can be removed. We are not receiving anything. Ideally, we would have a "remote invocation" type of Provenance Event for this (since we're not actually deleting it ourselves, we are asking another service to delete some resource). This event doesn't exist but there is a ticket for it. In the meantime, I would recommend just removing this line altogether.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36204625
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    +        final GetObjectRequest request;
    +        if (versionId == null) {
    +            request = new GetObjectRequest(bucket, key);
    +        } else {
    +            request = new GetObjectRequest(bucket, key, versionId);
    +        }
    +
    +        final Map<String, String> attributes = new HashMap<>();
    +        try (final S3Object s3Object = s3.getObject(request)) {
    +            flowFile = session.importFrom(s3Object.getObjectContent(), flowFile);
    --- End diff --
    
    It looks like you are doing a get then delete from S3, and writing the FlowFile's content with the data you got from S3.  I think this behavior should be documented in the Processor's '@CapabilityDescription'


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36411360
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,109 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.List;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonServiceException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.DeleteObjectRequest;
    +import com.amazonaws.services.s3.model.DeleteVersionRequest;
    +import org.apache.nifi.annotation.behavior.SupportsBatching;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket. " +
    +        "And the FlowFiles are checked if exists or not before deleting.")
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        try {
    +            // Checks if the key exists or not
    +            // If there is no such a key, then throws a exception
    +            s3.getObjectMetadata(bucket, key);
    +
    +            // Deletes a key on Amazon S3
    +            if (versionId == null) {
    +                final DeleteObjectRequest r = new DeleteObjectRequest(bucket, key);
    +                s3.deleteObject(r);
    +            } else {
    +                final DeleteVersionRequest r = new DeleteVersionRequest(bucket, key, versionId);
    +                s3.deleteVersion(r);
    +            }
    +        } catch (final AmazonServiceException ase) {
    +            getLogger().error("Failed to delete S3 Object for {}; routing to failure", new Object[]{flowFile, ase});
    +            session.transfer(flowFile, REL_FAILURE);
    +            return;
    +        }
    +
    +        session.transfer(flowFile, REL_SUCCESS);
    +        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    +        getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis});
    +        session.getProvenanceReporter().receive(flowFile, "https://" + bucket + ".amazonaws.com/" + key, transferMillis);
    --- End diff --
    
    Mark,
        I do think a "RemoteInvocation" provenance event would be helpful in general.
    
        In this case however, the processor is actually getting the object from AmazonS3, and it is writing that data to the FlowFile content and passing that forward.  You don't think that should emit a Receive event?  To me, this seems similar to what FetchS3Object is doing.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36296186
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/test/java/org/apache/nifi/processors/aws/s3/TestDeleteS3Object.java ---
    @@ -0,0 +1,70 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.net.URISyntaxException;
    +import java.net.URL;
    +import java.nio.file.Paths;
    +import java.util.HashMap;
    +import java.util.Map;
    +
    +import org.apache.nifi.util.TestRunner;
    +import org.apache.nifi.util.TestRunners;
    +import org.junit.Before;
    +import org.junit.Ignore;
    +import org.junit.Test;
    +
    +@Ignore("For local testing only - interacts with S3 so the credentials file must be configured and all necessary buckets created")
    +public class TestDeleteS3Object {
    +
    +    private final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
    +
    +    // When you want to test this, you should create a bucket on Amazon S3 as follows.
    +    private final String TEST_REGION = "us-east-1";
    +    private final String TEST_BUCKET = "test-bucket-00000000-0000-0000-0000-1234567890123";
    +
    +    @Before
    --- End diff --
    
    I think what you have in the testSimpleDelete() is mostly there.  I'm thinking if you specify a KEY you might be good to go.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36204088
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    --- End diff --
    
    I don't see the 's3.bucket', 's3.version', or 'path' FlowFile attributes being written by this processor, so I would suggest removing these annotations.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36296116
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    +        final GetObjectRequest request;
    +        if (versionId == null) {
    +            request = new GetObjectRequest(bucket, key);
    +        } else {
    +            request = new GetObjectRequest(bucket, key, versionId);
    +        }
    +
    +        final Map<String, String> attributes = new HashMap<>();
    +        try (final S3Object s3Object = s3.getObject(request)) {
    +            flowFile = session.importFrom(s3Object.getObjectContent(), flowFile);
    --- End diff --
    
    Fair enough.  Please document that this processor will replace the incoming FlowFile content with the deleted object content.  Or make it configurable via a Processor Property, so that the user can decide whether or not they want this behavior.  Thanks!


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36380391
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    +        final GetObjectRequest request;
    +        if (versionId == null) {
    +            request = new GetObjectRequest(bucket, key);
    +        } else {
    +            request = new GetObjectRequest(bucket, key, versionId);
    +        }
    +
    +        final Map<String, String> attributes = new HashMap<>();
    +        try (final S3Object s3Object = s3.getObject(request)) {
    +            flowFile = session.importFrom(s3Object.getObjectContent(), flowFile);
    +            attributes.put("s3.bucket", s3Object.getBucketName());
    +
    +            ObjectMetadata metadata = s3Object.getObjectMetadata();
    +            String objectVersionId = metadata.getVersionId();
    +            if (objectVersionId == null) {
    +                final DeleteObjectRequest r = new DeleteObjectRequest(bucket, key);
    +                s3.deleteObject(r);
    +            } else {
    +                final DeleteVersionRequest r = new DeleteVersionRequest(bucket, key, objectVersionId);
    +                s3.deleteVersion(r);
    +            }
    +        } catch (final IOException | AmazonClientException ioe) {
    +            getLogger().error("Failed to retrieve S3 Object for {}; routing to failure", new Object[]{flowFile, ioe});
    +            session.transfer(flowFile, REL_FAILURE);
    +            return;
    +        }
    +
    +        session.transfer(flowFile, REL_SUCCESS);
    +        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    +        getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis});
    +        session.getProvenanceReporter().receive(flowFile, "http://" + bucket + ".amazonaws.com/" + key, transferMillis);
    --- End diff --
    
    Is there any way to check the protocol?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36206235
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/test/java/org/apache/nifi/processors/aws/s3/TestDeleteS3Object.java ---
    @@ -0,0 +1,70 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.net.URISyntaxException;
    +import java.net.URL;
    +import java.nio.file.Paths;
    +import java.util.HashMap;
    +import java.util.Map;
    +
    +import org.apache.nifi.util.TestRunner;
    +import org.apache.nifi.util.TestRunners;
    +import org.junit.Before;
    +import org.junit.Ignore;
    +import org.junit.Test;
    +
    +@Ignore("For local testing only - interacts with S3 so the credentials file must be configured and all necessary buckets created")
    +public class TestDeleteS3Object {
    +
    +    private final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
    +
    +    // When you want to test this, you should create a bucket on Amazon S3 as follows.
    +    private final String TEST_REGION = "us-east-1";
    +    private final String TEST_BUCKET = "test-bucket-00000000-0000-0000-0000-1234567890123";
    +
    +    @Before
    --- End diff --
    
    I'm not sure you need this setUp() method, because you are doing the same sort of thing again in your testSimpleDelete() method


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by yu-iskw <gi...@git.apache.org>.
Github user yu-iskw commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36269663
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    +        return new PropertyDescriptor.Builder()
    +                .name(propertyDescriptorName)
    +                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +                .expressionLanguageSupported(true)
    +                .dynamic(true)
    +                .build();
    +    }
    +
    +    public void onTrigger(final ProcessContext context, final ProcessSession session) {
    +        FlowFile flowFile = session.get();
    +        if (flowFile == null) {
    +            return;
    +        }
    +
    +        final long startNanos = System.nanoTime();
    +
    +        final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    +        final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    +        final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();
    +
    +        final AmazonS3 s3 = getClient();
    +        final FlowFile ff = flowFile;
    +        final GetObjectRequest request;
    +        if (versionId == null) {
    +            request = new GetObjectRequest(bucket, key);
    +        } else {
    +            request = new GetObjectRequest(bucket, key, versionId);
    +        }
    +
    +        final Map<String, String> attributes = new HashMap<>();
    +        try (final S3Object s3Object = s3.getObject(request)) {
    +            flowFile = session.importFrom(s3Object.getObjectContent(), flowFile);
    --- End diff --
    
    Hmm, I am thinking that we don't need to check if a key exists or not. It would be better to remove a key directly.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on the pull request:

    https://github.com/apache/nifi/pull/72#issuecomment-130674437
  
    Yu,
       Thanks for working through this.  There are a few things I still think you need to do:
    
       1) I made comments on lines 70 and 61 that I think you need to consider.  I do not believe you need to override "getSupportedDynamicPropertyDescriptors", but in order for that to work you must add "VERSION_ID" to the "properties" list on line 60/61.
    
       2) I think you need to add this class to the service descriptor file located here: nifi-aws-processors/src/main/resources/META-INF/services/org.apache.nifi.processor.Processor
    
    Thanks again!
    
    Dan


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

[GitHub] nifi pull request: [NIFI-774] Create a DeleteS3Object Processor

Posted by danbress <gi...@git.apache.org>.
Github user danbress commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/72#discussion_r36295981
  
    --- Diff: nifi/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/DeleteS3Object.java ---
    @@ -0,0 +1,125 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.aws.s3;
    +
    +import java.io.IOException;
    +import java.util.*;
    +import java.util.concurrent.TimeUnit;
    +
    +import com.amazonaws.AmazonClientException;
    +import com.amazonaws.services.s3.AmazonS3;
    +import com.amazonaws.services.s3.model.*;
    +import org.apache.nifi.annotation.behavior.*;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.flowfile.FlowFile;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.ProcessSession;
    +import org.apache.nifi.processor.util.StandardValidators;
    +
    +@SupportsBatching
    +@SeeAlso({PutS3Object.class})
    +@Tags({"Amazon", "S3", "AWS", "Archive", "Delete"})
    +@CapabilityDescription("Deletes FlowFiles on an Amazon S3 Bucket")
    +@DynamicProperty(name = "The name of a User-Defined Metadata field to add to the S3 Object",
    +        value = "The value of a User-Defined Metadata field to add to the S3 Object",
    +        description = "Allows user-defined metadata to be added to the S3 object as key/value pairs",
    +        supportsExpressionLanguage = true)
    +@ReadsAttribute(attribute = "filename", description = "Uses the FlowFile's filename as the filename for the S3 object")
    +@WritesAttributes({
    +        @WritesAttribute(attribute = "s3.bucket", description = "The name of the S3 bucket"),
    +        @WritesAttribute(attribute = "s3.version", description = "The version of the S3 Object"),
    +        @WritesAttribute(attribute = "path", description = "The path of the file"),
    +})
    +public class DeleteS3Object extends AbstractS3Processor {
    +
    +    public static final PropertyDescriptor VERSION_ID = new PropertyDescriptor.Builder()
    +            .name("Version")
    +            .description("The Version of the Object to delete")
    +            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true)
    +            .required(false)
    +            .build();
    +
    +    public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
    +            Arrays.asList(KEY, BUCKET, ACCESS_KEY, SECRET_KEY, CREDENTAILS_FILE, REGION, TIMEOUT,
    +                    FULL_CONTROL_USER_LIST, READ_USER_LIST, WRITE_USER_LIST, READ_ACL_LIST, WRITE_ACL_LIST, OWNER));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return properties;
    +    }
    +
    +    @Override
    +    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    --- End diff --
    
       This is because of the comment I put on line 61.  You must add the VERSION_ID PropertyDescriptor to the list of supported properties.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---