Posted to commits@ambari.apache.org by GitBox <gi...@apache.org> on 2018/11/23 19:55:33 UTC

[GitHub] oleewere closed pull request #18: AMBARI-24946 - Infra Manager: use HDFS client when writing to cloud destinations

URL: https://github.com/apache/ambari-infra/pull/18
 
 
   

This is a PR merged from a forked repository. Because GitHub hides the original
diff of a foreign (forked) pull request once it is merged, the diff is reproduced
below for the sake of provenance:
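
For context: the change drops the MinIO-based S3Uploader and reaches cloud
destinations through the ordinary Hadoop FileSystem (HDFS) client instead, with
the hadoop-aws s3a connector and core-site.xml deciding where the data actually
lands. A minimal sketch of that idea, assuming hadoop-aws is on the classpath;
the bucket and destination directory come from the test configuration in the
diff, the file name is made up:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class S3aUploadSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();      // also reads core-site.xml/hdfs-site.xml from the classpath
        conf.set("fs.defaultFS", "s3a://testbucket");   // point the "HDFS" client at an S3 bucket instead of a namenode
        try (FileSystem fs = FileSystem.get(conf)) {
          // Same API call whether the destination is hdfs:// or s3a://
          fs.copyFromLocalFile(
              new Path("file:///tmp/audit_logs_-_2010-10-09.json.bz2"),            // hypothetical local archive
              new Path("/archives/audit_logs/audit_logs_-_2010-10-09.json.bz2"));
        }
      }
    }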

diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/S3Client.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/S3Client.java
index 9c8fbcfa..fe4ff7ef 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/S3Client.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/S3Client.java
@@ -43,6 +43,16 @@ public S3Client(String host, int port, String bucket) {
     }
   }
 
+  public void createBucket() {
+    try {
+      if (!s3client.bucketExists(bucket))
+        s3client.makeBucket(bucket);
+    }
+    catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
   public void putObject(String key, InputStream inputStream, long length) {
     try {
       s3client.putObject(bucket, key, inputStream, length, "application/octet-stream");
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
index 6ac37099..90350073 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
@@ -25,7 +25,6 @@
 import static org.apache.ambari.infra.TestUtil.runCommand;
 
 import java.io.File;
-import java.io.IOException;
 import java.net.URL;
 import java.time.OffsetDateTime;
 import java.util.Date;
@@ -35,17 +34,14 @@
 import org.apache.ambari.infra.S3Client;
 import org.apache.ambari.infra.Solr;
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.solr.common.SolrInputDocument;
 import org.jbehave.core.annotations.AfterStories;
 import org.jbehave.core.annotations.BeforeStories;
 
+import spark.resource.ClassPathResource;
+
 public abstract class AbstractInfraSteps {
   private static final Logger logger = LogManager.getLogger(AbstractInfraSteps.class);
 
@@ -76,6 +72,10 @@ public String getLocalDataFolder() {
     return ambariFolder + "/ambari-infra/ambari-infra-manager/docker/test-out";
   }
 
+  public String getInfraManagerConfDir() {
+    return ambariFolder + "/ambari-infra/ambari-infra-manager/ambari-infra-manager/target/package/conf";
+  }
+
   @BeforeStories
   public void initDockerContainer() throws Exception {
     System.setProperty("HADOOP_USER_NAME", "root");
@@ -89,6 +89,8 @@ public void initDockerContainer() throws Exception {
       FileUtils.cleanDirectory(new File(localDataFolder));
     }
 
+    FileUtils.copyDirectory(new ClassPathResource("conf").getFile(), new File(getInfraManagerConfDir()));
+
     shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh";
     logger.info("Create new docker container for testing Ambari Infra Manager ...");
     runCommand(new String[]{shellScriptLocation, "start"});
@@ -103,6 +105,7 @@ public void initDockerContainer() throws Exception {
 
     logger.info("Initializing s3 client");
     s3client = new S3Client(dockerHost, FAKE_S3_PORT, S3_BUCKET_NAME);
+    s3client.createBucket();
 
     checkInfraManagerReachable();
   }
@@ -158,24 +161,7 @@ public void shutdownContainers() throws Exception {
     logger.info("Found {} files on s3.", objectKeys.size());
     objectKeys.forEach(objectKey ->  logger.info("Found file on s3 with key {}", objectKey));
 
-    logger.info("Listing files on hdfs.");
-    try (FileSystem fileSystem = getHdfs()) {
-      int count = 0;
-      RemoteIterator<LocatedFileStatus> it = fileSystem.listFiles(new Path("/test_audit_logs"), true);
-      while (it.hasNext()) {
-        logger.info("Found file on hdfs with name {}", it.next().getPath().getName());
-        ++count;
-      }
-      logger.info("{} files found on hfds", count);
-    }
-
     logger.info("shutdown containers");
     runCommand(new String[]{shellScriptLocation, "stop"});
   }
-
-  protected FileSystem getHdfs() throws IOException {
-    Configuration conf = new Configuration();
-    conf.set("fs.defaultFS", String.format("hdfs://%s:%d/", dockerHost, HDFS_PORT));
-    return FileSystem.get(conf);
-  }
 }
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
index ff96c9a1..e67762e3 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
@@ -29,9 +29,7 @@
 
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.IOException;
 import java.io.InputStreamReader;
-import java.io.UncheckedIOException;
 import java.time.Duration;
 import java.time.OffsetDateTime;
 import java.util.Arrays;
@@ -45,13 +43,10 @@
 import org.apache.ambari.infra.JobExecutionInfo;
 import org.apache.ambari.infra.S3Client;
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
+import org.jbehave.core.annotations.AfterScenario;
 import org.jbehave.core.annotations.Given;
 import org.jbehave.core.annotations.Then;
 import org.jbehave.core.annotations.When;
@@ -185,26 +180,6 @@ private boolean isSolrEmpty(SolrQuery query) {
     return getSolr().query(query).getResults().isEmpty();
   }
 
-  @Then("Check $count files exists on hdfs with filenames containing the text $text in the folder $path after $waitSec seconds")
-  public void checkNumberOfFilesOnHdfs(int count, String text, String path, int waitSec) throws Exception {
-    try (FileSystem fileSystem = getHdfs()) {
-      doWithin(waitSec, "check uploaded files to hdfs", () -> {
-        try {
-          int fileCount = 0;
-          RemoteIterator<LocatedFileStatus> it = fileSystem.listFiles(new Path(path), true);
-          while (it.hasNext()) {
-            if (it.next().getPath().getName().contains(text))
-              ++fileCount;
-          }
-          return fileCount == count;
-        }
-        catch (IOException e) {
-          throw new UncheckedIOException(e);
-        }
-      });
-    }
-  }
-
   @Then("Check $count files exists on local filesystem with filenames containing the text $text in the folder $path for job $jobName")
   public void checkNumberOfFilesOnLocalFilesystem(long count, String text, String path, String jobName) {
     File destinationDirectory = new File(getLocalDataFolder(), path.replace("${jobId}", launchedJobs.get(jobName).getJobId()));
@@ -239,4 +214,9 @@ public void checkStoredDocumentIds(String fileNamePart) throws Exception {
     assertThat(documentIds.size(), is(0));
     assertThat(storedDocumentIds.size(), is(size));
   }
+
+  @AfterScenario
+  public void waitABit() throws InterruptedException {
+    Thread.sleep(5000);
+  }
 }
diff --git a/ambari-infra-manager-it/src/test/resources/conf/core-site.xml b/ambari-infra-manager-it/src/test/resources/conf/core-site.xml
new file mode 100644
index 00000000..1148c851
--- /dev/null
+++ b/ambari-infra-manager-it/src/test/resources/conf/core-site.xml
@@ -0,0 +1,42 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>fs.defaultFS</name>
+    <value>s3a://testbucket</value>
+  </property>
+  <property>
+    <name>fs.s3a.endpoint</name>
+    <value>http://fakes3:4569</value>
+  </property>
+  <property>
+    <name>fs.s3a.access.key</name>
+    <value>MyAccessKey</value>
+  </property>
+  <property>
+    <name>fs.s3a.secret.key</name>
+    <value>MySecretKey</value>
+  </property>
+  <property>
+    <name>fs.s3a.path.style.access</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>fs.s3a.multiobjectdelete.enable</name>
+    <value>false</value>
+  </property>
+</configuration>
\ No newline at end of file
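
This core-site.xml is copied next to the Infra Manager package by the
integration test, so any Hadoop FileSystem built from a plain Configuration
resolves paths against the fake S3 endpoint through s3a. A rough sketch of
reading back the archived files with that configuration; the conf path is
hypothetical, and the tests themselves verify through the MinIO-based S3Client
instead:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;

    public class ListArchivedFilesSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("file:///path/to/package/conf/core-site.xml"));  // hypothetical location of the copied conf
        try (FileSystem fs = FileSystem.get(conf)) {
          RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/archives/audit_logs"), true);
          while (it.hasNext())
            System.out.println("archived: " + it.next().getPath().getName());
        }
      }
    }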
diff --git a/ambari-infra-manager-it/src/test/resources/conf/hdfs-site.xml b/ambari-infra-manager-it/src/test/resources/conf/hdfs-site.xml
new file mode 100644
index 00000000..b529b919
--- /dev/null
+++ b/ambari-infra-manager-it/src/test/resources/conf/hdfs-site.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property><name>dfs.namenode.rpc-address</name><value>namenode:9000</value></property>
+  <property><name>dfs.replication</name><value>1</value></property>
+</configuration>
diff --git a/ambari-infra-manager-it/src/test/resources/conf/infra-manager.properties b/ambari-infra-manager-it/src/test/resources/conf/infra-manager.properties
new file mode 100644
index 00000000..bb75fad3
--- /dev/null
+++ b/ambari-infra-manager-it/src/test/resources/conf/infra-manager.properties
@@ -0,0 +1,67 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+infra-manager.batch.db.file=job-repository.db
+infra-manager.batch.db.init=false
+infra-manager.batch.db.username=admin
+infra-manager.batch.db.password=admin
+management.security.enabled=false
+management.health.solr.enabled=false
+infra-manager.server.data.folder=/tmp/ambariInfraManager
+
+# Archive Service Logs
+infra-manager.jobs.solr_data_archiving.archive_service_logs.enabled=true
+infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.zoo_keeper_connection_string=zookeeper:2181
+infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.collection=hadoop_logs
+infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.query_text=logtime:[${start} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[0]=logtime
+infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=id
+infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
+infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=100
+infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=150
+infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=HDFS
+infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_destination_directory=/archives/service_logs
+infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_file_permission=644
+infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_column=logtime
+infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
+infra-manager.jobs.solr_data_archiving.archive_service_logs.ttl=PT24H
+infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.enabled=false
+infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.cron=0 * * * * ?
+# Archive Audit Logs
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.enabled=true
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.collection=audit_logs
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=evtTime:[${start} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(evtTime:${evtTime} AND id:{${id} TO *]) OR evtTime:{${evtTime} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=evtTime
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[1]=id
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=evtTime:[${start.evtTime} TO ${end.evtTime}} OR (evtTime:${end.evtTime} AND id:[* TO ${end.id}])
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.read_block_size=100
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.write_block_size=150
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=HDFS
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=evtTime
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/archives/audit_logs
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_file_permission=644
+# Delete Audit Logs
+infra-manager.jobs.solr_data_deleting.delete_audit_logs.enabled=true
+infra-manager.jobs.solr_data_deleting.delete_audit_logs.zoo_keeper_connection_string=zookeeper:2181
+infra-manager.jobs.solr_data_deleting.delete_audit_logs.collection=audit_logs
+infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=evtTime
+# Job data clean up
+infra-manager.jobs.clean-up.ttl=PT240H
+infra-manager.jobs.clean-up.scheduling.enabled=true
+infra-manager.jobs.clean-up.scheduling.cron=0 * * * * ?
diff --git a/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story b/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
index 806dc845..bfa9e4b9 100644
--- a/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
+++ b/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
@@ -9,31 +9,31 @@ Scenario: Exporting 10 documents using writeBlockSize=3 produces 4 files
 
 Given 10 documents in solr with logtime from 2010-10-09T05:00:00.000Z to 2010-10-09T20:00:00.000Z
 When start archive_audit_logs job with parameters writeBlockSize=3,start=2010-10-09T00:00:00.000Z,end=2010-10-11T00:00:00.000Z after 2 seconds
-Then Check 4 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2010-10-09 after 20 seconds
+Then Check 4 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2010-10-09 after 20 seconds
 And solr does not contain documents between 2010-10-09T05:00:00.000Z and 2010-10-09T20:00:00.000Z after 5 seconds
-And Check the files solr_archive_audit_logs_-_2010-10-09 contains the archived documents
+And Check the files archives/audit_logs/audit_logs_-_2010-10-09 contains the archived documents
 
 
 Scenario: Running archiving job with a bigger start value than end value exports and deletes 0 documents
 
 Given 10 documents in solr with logtime from 2010-01-01T05:00:00.000Z to 2010-01-04T05:00:00.000Z
 When start archive_audit_logs job with parameters writeBlockSize=3,start=2010-01-03T05:00:00.000Z,end=2010-01-02T05:00:00.000Z after 2 seconds
-Then No file exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2010-01-0
+Then No file exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2010-01-0
 And solr contains 10 documents between 2010-01-01T05:00:00.000Z and 2010-01-04T05:00:00.000Z
 
 
 Scenario: Archiving job fails when part of the data is exported. After resolving the issue and restarting the job exports the rest of the data.
 
 Given 200 documents in solr with logtime from 2011-10-09T05:00:00.000Z to 2011-10-09T20:00:00.000Z
-And a file on s3 with key solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.bz2
+And a file on s3 with key archives/audit_logs/audit_logs_-_2011-10-09T08-00-00.000Z.json.bz2
 When start archive_audit_logs job with parameters writeBlockSize=20,start=2010-11-09T00:00:00.000Z,end=2011-10-11T00:00:00.000Z after 2 seconds
-Then Check 3 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2011-10-09 after 20 seconds
+Then Check 3 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2011-10-09 after 20 seconds
 And solr does not contain documents between 2011-10-09T05:00:00.000Z and 2011-10-09T07:59:59.999Z after 5 seconds
-When delete file with key solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.bz2 from s3
+When delete file with key archives/audit_logs/audit_logs_-_2011-10-09T08-00-00.000Z.json.bz2 from s3
 And restart archive_audit_logs job within 2 seconds
-Then Check 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2011-10-09 after 20 seconds
+Then Check 10 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2011-10-09 after 20 seconds
 And solr does not contain documents between 2011-10-09T05:00:00.000Z and 2011-10-09T20:00:00.000Z after 5 seconds
-And Check the files solr_archive_audit_logs_-_2011-10-09 contains the archived documents
+And Check the files archives/audit_logs/audit_logs_-_2011-10-09 contains the archived documents
 
 
 Scenario: After Deleting job deletes documents from solr no document found in the specified interval
@@ -43,14 +43,6 @@ When start delete_audit_logs job with parameters start=2012-10-09T05:00:00.000Z,
 Then solr does not contain documents between 2012-10-09T05:00:00.000Z and 2012-10-09T20:00:00.000Z after 5 seconds
 
 
-Scenario: Archiving documents to hdfs
-
-Given 1000 documents in solr with logtime from 2014-01-04T05:00:00.000Z to 2014-01-06T20:00:00.000Z
-When start archive_audit_logs job with parameters start=2014-01-04T05:00:00.000Z,end=2014-01-06T20:00:00.000Z,destination=HDFS,hdfsDestinationDirectory=/test_audit_logs after 2 seconds
-Then Check 7 files exists on hdfs with filenames containing the text audit_logs_-_2014-01-0 in the folder /test_audit_logs after 10 seconds
-And solr does not contain documents between 2014-01-04T05:00:00.000Z and 2014-01-06T20:00:00.000Z after 10 seconds
-
-
 Scenario: Archiving documents to local filesystem
 
 Given 200 documents in solr with logtime from 2014-02-04T05:00:00.000Z to 2014-02-06T20:00:00.000Z
@@ -63,8 +55,8 @@ Scenario: Launch Archiving job. Initiate stop and check that part of the data is
 
 Given 500 documents in solr with logtime from 2014-03-09T05:00:00.000Z to 2014-03-09T20:00:00.000Z
 When start archive_audit_logs job with parameters writeBlockSize=20,start=2014-03-09T05:00:00.000Z,end=2014-03-09T20:00:00.000Z after 2 seconds
-And stop job archive_audit_logs after at least 1 file exists in s3 with filename containing text solr_archive_audit_logs_-_2014-03-09 within 10 seconds
-Then Less than 20 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2014-03-09 after 20 seconds
+And stop job archive_audit_logs after at least 1 file exists in s3 with filename containing text archives/audit_logs/audit_logs_-_2014-03-09 within 10 seconds
+Then Less than 20 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2014-03-09 after 20 seconds
 When restart archive_audit_logs job within 10 seconds
-Then Check 25 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2014-03-09 after 20 seconds
-And Check the files solr_archive_audit_logs_-_2014-03-09 contains the archived documents
\ No newline at end of file
+Then Check 25 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2014-03-09 after 20 seconds
+And Check the files archives/audit_logs/audit_logs_-_2014-03-09 contains the archived documents
\ No newline at end of file
diff --git a/ambari-infra-manager/pom.xml b/ambari-infra-manager/pom.xml
index 3b1f7de2..722bf51a 100644
--- a/ambari-infra-manager/pom.xml
+++ b/ambari-infra-manager/pom.xml
@@ -41,6 +41,7 @@
     <spring-boot.version>2.0.6.RELEASE</spring-boot.version>
     <swagger.version>1.5.16</swagger.version>
     <jjwt.version>0.6.0</jjwt.version>
+    <aws-sdk.version>1.11.445</aws-sdk.version>
   </properties>
 
   <build>
@@ -353,6 +354,35 @@
         </exclusion>
       </exclusions>
     </dependency>
+
+    <!-- AWS -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.amazonaws</groupId>
+          <artifactId>aws-java-sdk-bundle</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-core</artifactId>
+      <version>${aws-sdk.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>${aws-sdk.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-dynamodb</artifactId>
+      <version>${aws-sdk.version}</version>
+    </dependency>
+
     <dependency>
       <groupId>com.google.code.gson</groupId>
       <artifactId>gson</artifactId>
@@ -496,11 +526,6 @@
       <artifactId>commons-compress</artifactId>
       <version>1.18</version>
     </dependency>
-    <dependency>
-      <groupId>io.minio</groupId>
-      <artifactId>minio</artifactId>
-      <version>5.0.1</version>
-    </dependency>
   </dependencies>
 
 </project>
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java
index 0e5196d1..008dec59 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java
@@ -35,23 +35,6 @@ public HadoopCredentialStore hadoopCredentialStore() {
     return new HadoopCredentialStore(credentialStoreProviderPath);
   }
 
-  @Bean
-  public S3Secrets s3SecretStore(HadoopCredentialStore hadoopCredentialStore) {
-    return new S3Secrets(s3AccessKeyId(hadoopCredentialStore), s3SecretKeyId(hadoopCredentialStore));
-  }
-
-  private Secret s3AccessKeyId(HadoopCredentialStore hadoopCredentialStore) {
-    return new CompositeSecret(
-            hadoopCredentialStore.getSecret( "AWS_ACCESS_KEY_ID"),
-            new EnvironmentalSecret("AWS_ACCESS_KEY_ID"));
-  }
-
-  private Secret s3SecretKeyId(HadoopCredentialStore hadoopCredentialStore) {
-    return new CompositeSecret(
-            hadoopCredentialStore.getSecret( "AWS_SECRET_ACCESS_KEY"),
-            new EnvironmentalSecret("AWS_SECRET_ACCESS_KEY"));
-  }
-
   @Bean
   public SslSecrets sslSecrets(HadoopCredentialStore hadoopCredentialStore) {
     return new SslSecrets(
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/S3Secrets.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/S3Secrets.java
deleted file mode 100644
index 30a1ca9c..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/S3Secrets.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf.security;
-
-public class S3Secrets {
-  private final Secret s3AccessKeyId;
-  private final Secret s3SecretAccessKey;
-
-  public S3Secrets(Secret s3AccessKeyId, Secret s3SecretAccessKey) {
-    this.s3AccessKeyId = s3AccessKeyId;
-    this.s3SecretAccessKey = s3SecretAccessKey;
-  }
-
-
-  public Secret getS3AccessKeyId() {
-    return s3AccessKeyId;
-  }
-
-  public Secret getS3SecretAccessKey() {
-    return s3SecretAccessKey;
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
index 3ad3926f..39ac398e 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
@@ -21,7 +21,6 @@
 import static java.util.Objects.requireNonNull;
 import static org.apache.ambari.infra.job.archive.ExportDestination.HDFS;
 import static org.apache.ambari.infra.job.archive.ExportDestination.LOCAL;
-import static org.apache.ambari.infra.job.archive.ExportDestination.S3;
 import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
 import static org.apache.ambari.infra.json.StringToFsPermissionConverter.toFsPermission;
 import static org.apache.commons.lang.StringUtils.isBlank;
@@ -49,10 +48,6 @@
   private String fileNameSuffixColumn;
   private String fileNameSuffixDateFormat;
   private SolrProperties solr;
-  private String s3AccessFile;
-  private String s3KeyPrefix;
-  private String s3BucketName;
-  private String s3Endpoint;
   private String hdfsEndpoint;
   private String hdfsDestinationDirectory;
   @JsonSerialize(converter = FsPermissionToStringConverter.class)
@@ -122,38 +117,6 @@ public void setSolr(SolrProperties solr) {
     this.solr = solr;
   }
 
-  public String getS3AccessFile() {
-    return s3AccessFile;
-  }
-
-  public void setS3AccessFile(String s3AccessFile) {
-    this.s3AccessFile = s3AccessFile;
-  }
-
-  public String getS3KeyPrefix() {
-    return s3KeyPrefix;
-  }
-
-  public void setS3KeyPrefix(String s3KeyPrefix) {
-    this.s3KeyPrefix = s3KeyPrefix;
-  }
-
-  public String getS3BucketName() {
-    return s3BucketName;
-  }
-
-  public void setS3BucketName(String s3BucketName) {
-    this.s3BucketName = s3BucketName;
-  }
-
-  public String getS3Endpoint() {
-    return s3Endpoint;
-  }
-
-  public void setS3Endpoint(String s3Endpoint) {
-    this.s3Endpoint = s3Endpoint;
-  }
-
   public String getHdfsEndpoint() {
     return hdfsEndpoint;
   }
@@ -194,17 +157,6 @@ public void setHdfsKerberosKeytabPath(String hdfsKerberosKeytabPath) {
     this.hdfsKerberosKeytabPath = hdfsKerberosKeytabPath;
   }
 
-  public Optional<S3Properties> s3Properties() {
-    if (isBlank(s3BucketName))
-      return Optional.empty();
-
-    return Optional.of(new S3Properties(
-            s3AccessFile,
-            s3KeyPrefix,
-            s3BucketName,
-            s3Endpoint));
-  }
-
   public Optional<HdfsProperties> hdfsProperties() {
     if (isBlank(hdfsDestinationDirectory))
       return Optional.empty();
@@ -261,12 +213,6 @@ public void validate() {
                   "The property localDestinationDirectory can not be null or empty string when destination is set to %s!", LOCAL.name()));
         break;
 
-      case S3:
-        s3Properties()
-                .orElseThrow(() -> new IllegalArgumentException("S3 related properties must be set if the destination is " + S3.name()))
-                .validate();
-        break;
-
       case HDFS:
         hdfsProperties()
                 .orElseThrow(() -> new IllegalArgumentException("HDFS related properties must be set if the destination is " + HDFS.name()))
@@ -286,10 +232,6 @@ public ArchivingProperties merge(JobParameters jobParameters) {
     archivingProperties.setLocalDestinationDirectory(jobParameters.getString("localDestinationDirectory", localDestinationDirectory));
     archivingProperties.setFileNameSuffixColumn(jobParameters.getString("fileNameSuffixColumn", fileNameSuffixColumn));
     archivingProperties.setFileNameSuffixDateFormat(jobParameters.getString("fileNameSuffixDateFormat", fileNameSuffixDateFormat));
-    archivingProperties.setS3AccessFile(jobParameters.getString("s3AccessFile", s3AccessFile));
-    archivingProperties.setS3BucketName(jobParameters.getString("s3BucketName", s3BucketName));
-    archivingProperties.setS3KeyPrefix(jobParameters.getString("s3KeyPrefix", s3KeyPrefix));
-    archivingProperties.setS3Endpoint(jobParameters.getString("s3Endpoint", s3Endpoint));
     archivingProperties.setHdfsEndpoint(jobParameters.getString("hdfsEndpoint", hdfsEndpoint));
     archivingProperties.setHdfsDestinationDirectory(jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory));
     archivingProperties.setHdfsFilePermission(toFsPermission(jobParameters.getString("hdfsFilePermission", FsPermissionToStringConverter.toString(hdfsFilePermission))));
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
index b35c1d2d..22040a98 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
@@ -27,7 +27,6 @@
 import javax.inject.Inject;
 
 import org.apache.ambari.infra.conf.InfraManagerDataConfig;
-import org.apache.ambari.infra.conf.security.S3Secrets;
 import org.apache.ambari.infra.job.AbstractJobsConfiguration;
 import org.apache.ambari.infra.job.JobContextRepository;
 import org.apache.ambari.infra.job.JobScheduler;
@@ -89,17 +88,11 @@ public DocumentExporter documentExporter(DocumentItemReader documentItemReader,
                                            InfraManagerDataConfig infraManagerDataConfig,
                                            @Value("#{jobParameters[end]}") String intervalEnd,
                                            DocumentWiper documentWiper,
-                                           JobContextRepository jobContextRepository,
-                                           S3Secrets s3Secrets) {
+                                           JobContextRepository jobContextRepository) {
 
     File baseDir = new File(infraManagerDataConfig.getDataFolder(), "exporting");
     CompositeFileAction fileAction = new CompositeFileAction(new BZip2Compressor());
     switch (parameters.getDestination()) {
-      case S3:
-        fileAction.add(new S3Uploader(
-                parameters.s3Properties().orElseThrow(() -> new IllegalStateException("S3 properties are not provided!")),
-                s3Secrets));
-        break;
       case HDFS:
         org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
         fileAction.add(new HdfsUploader(conf,
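
With the S3 branch gone, the HDFS case is the only remote code path: the empty
Configuration picks up whatever core-site.xml/hdfs-site.xml sit on the Infra
Manager classpath, so the same HdfsUploader writes to hdfs://, s3a://, or any
other Hadoop-supported filesystem depending on fs.defaultFS. A tiny illustration
of that lookup (the printed value assumes the test conf above; it is not
guaranteed otherwise):

    import org.apache.hadoop.conf.Configuration;

    public class DefaultFsSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();   // reads core-site.xml from the classpath, if present
        // With the test conf above this prints "s3a://testbucket"; without it, the Hadoop default "file:///"
        System.out.println(conf.get("fs.defaultFS", "file:///"));
      }
    }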
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java
index a143e4c5..703d7fb7 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java
@@ -20,6 +20,5 @@
 
 public enum ExportDestination {
   LOCAL,
-  HDFS,
-  S3
+  HDFS
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
deleted file mode 100644
index 3f541b8a..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.commons.csv.CSVFormat.DEFAULT;
-
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.io.UncheckedIOException;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Optional;
-
-import org.apache.ambari.infra.conf.security.Secret;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVRecord;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-public class S3AccessCsv implements Secret {
-  private static final Logger logger = LogManager.getLogger(S3AccessCsv.class);
-  public static final String ACCESS_KEY_ID = "Access key ID";
-  public static final String SECRET_ACCESS_KEY = "Secret access key";
-
-
-  public static S3AccessCsv file(String path, String propertyName) {
-    try {
-      return new S3AccessCsv(new FileReader(path), propertyName);
-    } catch (FileNotFoundException e) {
-      throw new UncheckedIOException(e);
-    }
-  }
-
-  private final Reader reader;
-  private final String propertyName;
-
-  S3AccessCsv(Reader reader, String propertyName) {
-    this.reader = reader;
-    this.propertyName = propertyName;
-  }
-
-  @Override
-  public Optional<String> get() {
-    try (CSVParser csvParser = CSVParser.parse(reader, DEFAULT.withHeader(
-            S3AccessKeyNames.AccessKeyId.getCsvName(), S3AccessKeyNames.SecretAccessKey.getCsvName()))) {
-      Iterator<CSVRecord> iterator = csvParser.iterator();
-      if (!iterator.hasNext()) {
-        throw new S3AccessCsvFormatException("Csv file is empty!");
-      }
-
-      CSVRecord record = iterator.next();
-      if (record.size() < 2) {
-        throw new S3AccessCsvFormatException("Csv file contains less than 2 columns!");
-      }
-
-      checkColumnExists(record, ACCESS_KEY_ID);
-      checkColumnExists(record, SECRET_ACCESS_KEY);
-
-      if (!iterator.hasNext()) {
-        throw new S3AccessCsvFormatException("Csv file contains header only!");
-      }
-
-      record = iterator.next();
-
-      Map<String, Integer> header = csvParser.getHeaderMap();
-      return Optional.ofNullable(record.get(header.get(propertyName)));
-    } catch (IOException e) {
-      throw new UncheckedIOException(e);
-    }
-  }
-
-  private void checkColumnExists(CSVRecord record, String s3AccessKeyName) {
-    if (!s3AccessKeyName.equals(record.get(s3AccessKeyName))) {
-      throw new S3AccessCsvFormatException(String.format("Csv file does not contain the required column: '%s'", s3AccessKeyName));
-    }
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java
deleted file mode 100644
index ef9d5391..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public class S3AccessCsvFormatException extends RuntimeException {
-  public S3AccessCsvFormatException(String message) {
-    super(message);
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java
deleted file mode 100644
index e840d3b3..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public enum S3AccessKeyNames {
-  AccessKeyId("AWS_ACCESS_KEY_ID", "Access key ID"),
-  SecretAccessKey("AWS_SECRET_ACCESS_KEY", "Secret access key");
-
-  private final String envVariableName;
-  private final String csvName;
-
-  S3AccessKeyNames(String envVariableName, String csvName) {
-    this.envVariableName = envVariableName;
-    this.csvName = csvName;
-  }
-
-  public String getEnvVariableName() {
-    return envVariableName;
-  }
-
-  public String getCsvName() {
-    return csvName;
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
deleted file mode 100644
index 59a4469e..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class S3Properties {
-  private final String s3AccessFile;
-  private final String s3KeyPrefix;
-  private final String s3BucketName;
-  private final String s3EndPoint;
-
-  public S3Properties(String s3AccessFile, String s3KeyPrefix, String s3BucketName, String s3EndPoint) {
-    this.s3AccessFile = s3AccessFile;
-    this.s3KeyPrefix = s3KeyPrefix;
-    this.s3BucketName = s3BucketName;
-    this.s3EndPoint = s3EndPoint;
-  }
-
-  public String getS3KeyPrefix() {
-    return s3KeyPrefix;
-  }
-
-  public String getS3BucketName() {
-    return s3BucketName;
-  }
-
-  public String getS3EndPoint() {
-    return s3EndPoint;
-  }
-
-  public String getS3AccessFile() {
-    return s3AccessFile;
-  }
-
-  @Override
-  public String toString() {
-    return "S3Properties{" +
-            "s3AccessFile='" + s3AccessFile + '\'' +
-            ", s3KeyPrefix='" + s3KeyPrefix + '\'' +
-            ", s3BucketName='" + s3BucketName + '\'' +
-            ", s3EndPoint='" + s3EndPoint + '\'' +
-            '}';
-  }
-
-  public void validate() {
-    if (isBlank(s3BucketName))
-      throw new IllegalArgumentException("The property s3BucketName can not be null or empty string!");
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
deleted file mode 100644
index f3c92b7d..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.commons.lang.StringUtils.isNotBlank;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.security.InvalidKeyException;
-import java.security.NoSuchAlgorithmException;
-
-import org.apache.ambari.infra.conf.security.CompositeSecret;
-import org.apache.ambari.infra.conf.security.S3Secrets;
-import org.apache.ambari.infra.conf.security.Secret;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.xmlpull.v1.XmlPullParserException;
-
-import io.minio.MinioClient;
-import io.minio.errors.ErrorResponseException;
-import io.minio.errors.InsufficientDataException;
-import io.minio.errors.InternalException;
-import io.minio.errors.InvalidArgumentException;
-import io.minio.errors.InvalidBucketNameException;
-import io.minio.errors.InvalidEndpointException;
-import io.minio.errors.InvalidPortException;
-import io.minio.errors.NoResponseException;
-import io.minio.errors.RegionConflictException;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class S3Uploader extends AbstractFileAction {
-
-  private static final Logger logger = LogManager.getLogger(S3Uploader.class);
-
-  private final MinioClient client;
-  private final String keyPrefix;
-  private final String bucketName;
-  private final Secret s3AccessKey;
-  private final Secret s3SecretKey;
-
-  public S3Uploader(S3Properties s3Properties, S3Secrets s3Secrets) {
-    logger.info("Initializing S3 client with " + s3Properties);
-
-    this.keyPrefix = s3Properties.getS3KeyPrefix();
-    this.bucketName = s3Properties.getS3BucketName();
-
-    if (isNotBlank(s3Properties.getS3AccessFile())) {
-      this.s3AccessKey = new CompositeSecret(s3Secrets.getS3AccessKeyId(), S3AccessCsv.file(s3Properties.getS3AccessFile(), "Access key ID"));
-      this.s3SecretKey = new CompositeSecret(s3Secrets.getS3SecretAccessKey(), S3AccessCsv.file(s3Properties.getS3AccessFile(), "Secret access key"));
-    }
-    else {
-      this.s3AccessKey = s3Secrets.getS3AccessKeyId();
-      this.s3SecretKey = s3Secrets.getS3SecretAccessKey();
-    }
-
-    try {
-      client = new MinioClient(s3Properties.getS3EndPoint(),
-              s3AccessKey.get().orElseThrow(() -> new IllegalArgumentException("Access key Id is not present!")),
-              s3SecretKey.get().orElseThrow(() -> new IllegalArgumentException("Secret Access Key is not present!")));
-
-      if (!client.bucketExists(bucketName))
-        client.makeBucket(bucketName);
-
-    } catch (RegionConflictException | XmlPullParserException | InvalidBucketNameException | NoSuchAlgorithmException | InsufficientDataException | ErrorResponseException | InvalidKeyException | NoResponseException | InvalidPortException | InvalidEndpointException | InternalException e) {
-      throw new RuntimeException(e);
-    } catch (IOException e) {
-      throw new UncheckedIOException(e);
-    }
-  }
-
-  @Override
-  public File onPerform(File inputFile) {
-    String key = keyPrefix + inputFile.getName();
-
-    try {
-      if (client.listObjects(bucketName, key).iterator().hasNext()) {
-        throw new UnsupportedOperationException(String.format("Object '%s' already exists in bucket '%s'", key, bucketName));
-      }
-
-      try (FileInputStream fileInputStream = new FileInputStream(inputFile)) {
-        client.putObject(bucketName, key, fileInputStream, inputFile.length(), "application/json");
-        return inputFile;
-      }
-    } catch (InvalidKeyException | NoSuchAlgorithmException | NoResponseException | XmlPullParserException | InvalidArgumentException | InvalidBucketNameException | ErrorResponseException | InternalException | InsufficientDataException e) {
-      throw new RuntimeException(e);
-    } catch (IOException e) {
-      throw new UncheckedIOException(e);
-    }
-  }
-}
diff --git a/ambari-infra-manager/src/main/resources/infra-manager.properties b/ambari-infra-manager/src/main/resources/infra-manager.properties
index 50c43e34..938cf021 100644
--- a/ambari-infra-manager/src/main/resources/infra-manager.properties
+++ b/ambari-infra-manager/src/main/resources/infra-manager.properties
@@ -32,7 +32,8 @@ infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=
 #infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
 infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=100
 infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=150
-infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=LOCAL
+infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=HDFS
+infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_destination_directory=/archives/service_logs
 infra-manager.jobs.solr_data_archiving.archive_service_logs.local_destination_directory=/tmp/ambariInfraManager
 infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_column=logtime
 infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
@@ -50,16 +51,13 @@ infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[1]=id
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=evtTime:[${start.evtTime} TO ${end.evtTime}} OR (evtTime:${end.evtTime} AND id:[* TO ${end.id}])
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.read_block_size=100
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.write_block_size=150
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=S3
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=HDFS
 # TODO: logtime may not be enough: The same filename can be generated when more than write_block_size count docs has the same logtime value
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=evtTime
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_endpoint=hdfs://namenode:9000/
+#infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_endpoint=hdfs://namenode:9000/
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/archives/audit_logs
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.local_destination_directory=/tmp/ambariInfraManager
-#infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_access_file=<any>.csv
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_key_prefix=solr_archive_
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_bucket_name=testbucket
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_endpoint=http://fakes3:4569
 # Archive Ranger Audit Logs
 infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.enabled=true
 infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181
diff --git a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java
deleted file mode 100644
index eb4b011f..00000000
--- a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.ambari.infra.job.archive.S3AccessCsv.ACCESS_KEY_ID;
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-import java.io.StringReader;
-
-import org.junit.Test;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class S3AccessCsvTest {
-
-  private static final String VALID_ACCESS_FILE = "Access key ID,Secret access key\n" +
-          "someKey,someSecret\n";
-
-  private static final String ANY_CSV_FILE = "Column1,Column2\n" +
-          "Foo,Bar\n";
-
-  @Test(expected = S3AccessCsvFormatException.class)
-  public void testGetPasswordReturnsNullIfInputIsEmpty() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(""), ACCESS_KEY_ID);
-    assertThat(accessCsv.get().isPresent(), is(false));
-  }
-
-  @Test
-  public void testGetPasswordReturnsAccessAndSecretKeyIfInputIsAValidS3AccessFile() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(VALID_ACCESS_FILE), ACCESS_KEY_ID);
-    assertThat(accessCsv.get().get(), is("someKey"));
-  }
-
-  @Test(expected = S3AccessCsvFormatException.class)
-  public void testGetPasswordThrowsExceptionIfNotAValidS3AccessFileProvided() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(ANY_CSV_FILE), ACCESS_KEY_ID);
-    assertThat(accessCsv.get().isPresent(), is(false));
-  }
-
-  @Test(expected = S3AccessCsvFormatException.class)
-  public void testGetPasswordThrowsExceptionIfAHeaderOnlyS3AccessFileProvided() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("Access key ID,Secret access key\n"), ACCESS_KEY_ID);
-    assertThat(accessCsv.get().isPresent(), is(false));
-  }
-
-  @Test(expected = S3AccessCsvFormatException.class)
-  public void testGetPasswordThrowsExceptionIfOnlyOneValidColumnProvided() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("Access key ID,Column\n"), ACCESS_KEY_ID);
-    assertThat(accessCsv.get().isPresent(), is(false));
-  }
-}
\ No newline at end of file


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services