Posted to commits@ambari.apache.org by kr...@apache.org on 2019/06/24 08:24:18 UTC

[ambari] branch branch-2.7 updated: AMBARI-25130 - Infra Manager: backport fixes from 2.8.0 (#2789)

This is an automated email from the ASF dual-hosted git repository.

krisztiankasa pushed a commit to branch branch-2.7
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/branch-2.7 by this push:
     new 80f58f2  AMBARI-25130 - Infra Manager: backport fixes from 2.8.0 (#2789)
80f58f2 is described below

commit 80f58f2f31ddecf882bb8a14a387a60a58f3b996
Author: kasakrisz <33...@users.noreply.github.com>
AuthorDate: Mon Jun 24 10:24:12 2019 +0200

    AMBARI-25130 - Infra Manager: backport fixes from 2.8.0 (#2789)
---
 ambari-infra/ambari-infra-manager-it/pom.xml       | 105 ++-
 .../java/org/apache/ambari/infra/InfraClient.java  | 113 +--
 .../java/org/apache/ambari/infra/S3Client.java     | 118 ++++
 .../test/java/org/apache/ambari/infra/Solr.java    |  12 +-
 .../java/org/apache/ambari/infra/TestUtil.java     |  12 +-
 .../ambari/infra/solr/metrics/MetricsIT.java       |  18 +-
 .../infra/solr/metrics/MockMetricsServer.java      |  10 +-
 .../ambari/infra/steps/AbstractInfraSteps.java     |  97 ++-
 .../apache/ambari/infra/steps/ExportJobsSteps.java | 182 ++---
 .../src/test/resources/conf/core-site.xml          |  42 ++
 .../src/test/resources/conf/hdfs-site.xml          |  21 +
 .../test/resources/conf}/infra-manager.properties  |  54 +-
 .../{log4j.properties => log4j2-test.properties}   |  19 +-
 .../test/resources/stories/infra_api_tests.story   |  31 +-
 ambari-infra/ambari-infra-manager/.gitignore       |   5 +-
 .../ambari-infra-manager/docker/Dockerfile         |   9 +-
 .../ambari-infra-manager/docker/docker-compose.yml |  17 +-
 .../docker/infra-manager-docker-compose.sh         |   4 +-
 .../docker/infra-manager-docker.sh                 |   4 +-
 .../ambari-infra-manager/docs/api/swagger.yaml     | 784 ---------------------
 ambari-infra/ambari-infra-manager/pom.xml          | 182 ++++-
 .../java/org/apache/ambari/infra/InfraManager.java |   9 +-
 .../ambari/infra/conf/InfraManagerConfig.java      |   4 +-
 .../conf/InfraManagerJerseyResourceConfig.java     |   9 +-
 .../infra/conf/InfraManagerServletConfig.java      |  35 +-
 .../conf/InfraManagerWebServerCustomizer.java      |  84 +++
 .../infra/conf/StaticResourceConfiguration.java    |   4 +-
 .../infra/conf/batch/InfraManagerBatchConfig.java  |  63 +-
 .../conf/batch/InfraManagerBatchConfigurer.java    |  75 ++
 ...sitePasswordStore.java => CompositeSecret.java} |  14 +-
 ...tyEnvironment.java => EnvironmentalSecret.java} |  13 +-
 ...itePasswordStore.java => HadoopCredential.java} |  22 +-
 .../infra/conf/security/HadoopCredentialStore.java |  28 +-
 .../security/{PasswordStore.java => Secret.java}   |   4 +-
 ...rSecurityConfig.java => SecretStoreConfig.java} |  16 +-
 .../{SecurityEnvironment.java => SslSecrets.java}  |  19 +-
 .../infra/conf/security/WebSecurityConfig.java     |  90 +++
 .../infra/doc/InfraManagerApiDocStorage.java       |  90 ---
 .../infra/job/AbstractJobsConfiguration.java       |  23 +-
 .../ambari/infra/job/InfraJobExecutionDao.java     |  75 ++
 .../org/apache/ambari/infra/job/JobProperties.java |  30 +-
 ...bsPropertyMap.java => JobPropertiesHolder.java} |  25 +-
 .../org/apache/ambari/infra/job/JobScheduler.java  |  39 +-
 .../apache/ambari/infra/job/JobsPropertyMap.java   |  35 +-
 .../ambari/infra/job/SchedulingProperties.java     |   9 -
 .../org/apache/ambari/infra/job/SolrDAOBase.java   |  28 +-
 ...essCsvFormatException.java => Validatable.java} |   8 +-
 .../infra/job/archive/AbstractFileAction.java      |  10 +-
 ...ingProperties.java => ArchivingProperties.java} | 188 ++---
 .../{HdfsUploader.java => BZip2Compressor.java}    |  35 +-
 .../apache/ambari/infra/job/archive/Document.java  |  19 +-
 .../archive/DocumentArchivingConfiguration.java    |  74 +-
 .../job/archive/DocumentArchivingPropertyMap.java  |  10 +-
 .../ambari/infra/job/archive/DocumentExporter.java |  18 +-
 .../infra/job/archive/ExportDestination.java       |   3 +-
 .../infra/job/archive/FileNameSuffixFormatter.java |  12 +-
 .../ambari/infra/job/archive/HdfsProperties.java   |  84 +++
 .../ambari/infra/job/archive/HdfsUploader.java     |  49 +-
 .../infra/job/archive/LocalDocumentItemWriter.java |  20 +-
 .../ambari/infra/job/archive/S3AccessCsv.java      |  94 ---
 .../ambari/infra/job/archive/S3Properties.java     |  66 --
 .../ambari/infra/job/archive/S3Uploader.java       |  74 --
 .../apache/ambari/infra/job/archive/SolrDAO.java   |  14 +-
 .../infra/job/archive/SolrDocumentIterator.java    |  19 +-
 .../infra/job/archive/SolrParametrizedString.java  |  23 +-
 .../ambari/infra/job/archive/SolrProperties.java   |  42 +-
 .../ambari/infra/job/archive/SolrQueryBuilder.java |  25 +-
 .../infra/job/cleanup/CleanUpConfiguration.java    |  79 +++
 .../infra/job/cleanup/CleanUpProperties.java       |  67 ++
 .../ambari/infra/job/cleanup/TaskHistoryWiper.java |  57 ++
 ...tingProperties.java => DeletingProperties.java} |  61 +-
 .../deleting/DocumentDeletingConfiguration.java    |  14 +-
 .../job/deleting/DocumentDeletingPropertyMap.java  |  10 +-
 .../infra/job/deleting/DocumentWiperTasklet.java   |  22 +-
 .../ambari/infra/job/dummy/DummyItemProcessor.java |   8 +-
 .../ambari/infra/job/dummy/DummyItemWriter.java    |  27 +-
 .../ambari/infra/job/dummy/DummyJobListener.java   |  12 +-
 .../ambari/infra/job/dummy/DummyStepListener.java  |  10 +-
 .../DurationToStringConverter.java}                |  16 +-
 .../FsPermissionToStringConverter.java}            |  16 +-
 .../OffsetDateTimeToStringConverter.java}          |  14 +-
 .../StringToDurationConverter.java}                |  28 +-
 .../StringToFsPermissionConverter.java}            |  29 +-
 .../apache/ambari/infra/manager/JobManager.java    |  81 ++-
 .../java/org/apache/ambari/infra/manager/Jobs.java |   6 +-
 .../DateUtil.java}                                 |  15 +-
 .../infra/model/ExecutionContextResponse.java      |   4 +-
 ...nStopRequest.java => ISO8601DateFormatter.java} |  47 +-
 .../ambari/infra/model/JobDetailsResponse.java     |  53 --
 .../infra/model/JobExecutionDetailsResponse.java   |  20 +-
 .../infra/model/JobExecutionInfoResponse.java      | 183 +++--
 .../infra/model/JobExecutionRestartRequest.java    |   3 +
 .../infra/model/JobExecutionStopRequest.java       |   3 +
 .../infra/model/JobInstanceDetailsResponse.java    |  30 +-
 .../infra/model/JobInstanceStartRequest.java       |   5 +-
 .../infra/model/StepExecutionContextResponse.java  |  15 +-
 .../infra/model/StepExecutionInfoResponse.java     | 127 ++--
 .../infra/model/StepExecutionProgressResponse.java |  12 +-
 .../infra/model/wrapper/JobExecutionData.java      | 118 ----
 .../infra/model/wrapper/StepExecutionData.java     | 133 ----
 .../apache/ambari/infra/rest/ApiDocResource.java   |  56 --
 .../ambari/infra/rest/JobExceptionMapper.java      |  28 +-
 .../org/apache/ambari/infra/rest/JobResource.java  |  56 +-
 .../src/main/resources/infra-manager-env.sh        |  16 +-
 .../src/main/resources/infra-manager.properties    |  69 +-
 .../src/main/resources/infraManager.sh             |  15 +-
 .../src/main/resources/log4j2.xml                  |   2 +-
 .../src/main/resources/swagger/swagger.html        | 161 ++---
 ...wordStoreTest.java => CompositeSecretTest.java} |  16 +-
 .../org/apache/ambari/infra/env/TestAppConfig.java | 128 ++++
 .../ambari/infra/job/InfraJobExecutionDAOIT.java   |  99 +++
 .../apache/ambari/infra/job/JobPropertiesTest.java |  56 --
 .../apache/ambari/infra/job/JobSchedulerTest.java  |  70 +-
 .../infra/job/archive/DocumentExporterTest.java    |  56 +-
 .../infra/job/archive/DocumentItemReaderTest.java  |  24 +-
 .../job/archive/FileNameSuffixFormatterTest.java   |  12 +-
 .../job/archive/LocalDocumentItemWriterTest.java   |  39 +-
 .../ambari/infra/job/archive/S3AccessCsvTest.java  |  70 --
 .../job/archive/SolrParametrizedStringTest.java    |  12 +-
 .../infra/job/archive/SolrPropertiesTest.java      |  34 +-
 .../infra/job/archive/SolrQueryBuilderTest.java    |  40 +-
 .../json/FsPermissionToStringConverterTest.java}   |  30 +-
 .../infra/model/ISO8601DateFormatterTest.java      |  50 ++
 ambari-infra/pom.xml                               |  11 +
 124 files changed, 2918 insertions(+), 3126 deletions(-)

diff --git a/ambari-infra/ambari-infra-manager-it/pom.xml b/ambari-infra/ambari-infra-manager-it/pom.xml
index f8f9aeb..e5a6339 100644
--- a/ambari-infra/ambari-infra-manager-it/pom.xml
+++ b/ambari-infra/ambari-infra-manager-it/pom.xml
@@ -32,9 +32,12 @@
 
   <properties>
     <jbehave.version>4.0.5</jbehave.version>
-    <failsafe-plugin.version>2.20</failsafe-plugin.version>
-    <docker.host>localhost</docker.host>
+    <failsafePlugin.version>3.0.0-M1</failsafePlugin.version>
+    <infraManager.docker.host>localhost</infraManager.docker.host>
     <stories.location>NONE</stories.location>
+    <okhttp.version>2.7.5</okhttp.version>
+    <generatedSourcesPath>${project.build.directory}/generated-sources</generatedSourcesPath>
+    <generatedSourcesJavaPath>main/java</generatedSourcesJavaPath>
   </properties>
 
   <dependencies>
@@ -44,9 +47,9 @@
       <version>${solr.version}</version>
     </dependency>
     <dependency>
-      <groupId>com.amazonaws</groupId>
-      <artifactId>aws-java-sdk-s3</artifactId>
-      <version>1.11.5</version>
+      <groupId>io.minio</groupId>
+      <artifactId>minio</artifactId>
+      <version>5.0.1</version>
     </dependency>
     <dependency>
       <groupId>commons-io</groupId>
@@ -164,6 +167,47 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <version>2.11.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <version>2.11.1</version>
+    </dependency>
+    <!-- swagger client related -->
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-annotations</artifactId>
+      <version>1.5.21</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>io.gsonfire</groupId>
+      <artifactId>gson-fire</artifactId>
+      <version>1.8.3</version>
+    </dependency>
+    <dependency>
+      <groupId>com.squareup.okhttp</groupId>
+      <artifactId>okhttp</artifactId>
+      <version>${okhttp.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.squareup.okhttp</groupId>
+      <artifactId>logging-interceptor</artifactId>
+      <version>${okhttp.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>javax.annotation</groupId>
+      <artifactId>javax.annotation-api</artifactId>
+      <version>1.3.2</version>
+    </dependency>
   </dependencies>
 
   <build>
@@ -179,6 +223,53 @@
         <directory>src/test/resources</directory>
       </testResource>
     </testResources>
+    <plugins>
+      <plugin>
+        <groupId>io.swagger</groupId>
+        <artifactId>swagger-codegen-maven-plugin</artifactId>
+        <version>2.3.1</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>generate</goal>
+            </goals>
+            <configuration>
+              <inputSpec>${project.parent.basedir}/ambari-infra-manager/src/main/resources/swagger/swagger.yaml</inputSpec>
+              <language>java</language>
+              <generateApiTests>false</generateApiTests>
+              <generateModelTests>false</generateModelTests>
+              <output>${generatedSourcesPath}</output>
+              <configOptions>
+                <sourceFolder>${generatedSourcesJavaPath}</sourceFolder>
+                <apiPackage>org.apache.ambari.infra.client.api</apiPackage>
+                <modelPackage>org.apache.ambari.infra.client.model</modelPackage>
+                <invokerPackage>org.apache.ambari.infra.client.invoker</invokerPackage>
+                <dateLibrary>java8</dateLibrary>
+              </configOptions>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>3.0.0</version>
+        <executions>
+          <execution>
+            <id>add-generated-source</id>
+            <phase>initialize</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${generatedSourcesPath}/${generatedSourcesJavaPath}</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
   </build>
 
   <profiles>
@@ -194,7 +285,7 @@
           <plugin>
             <groupId>org.apache.maven.plugins</groupId>
             <artifactId>maven-failsafe-plugin</artifactId>
-            <version>${failsafe-plugin.version}</version>
+            <version>${failsafePlugin.version}</version>
             <executions>
               <execution>
                 <id>run-integration-tests</id>
@@ -209,7 +300,7 @@
                   </includes>
                   <systemPropertyVariables>
                     <log4j.configuration>file:${project.build.testOutputDirectory}/log4j.properties</log4j.configuration>
-                    <docker.host>${docker.host}</docker.host>
+                    <docker.host>${infraManager.docker.host}</docker.host>
                     <backend.stories.location>${stories.location}</backend.stories.location>
                   </systemPropertyVariables>
                 </configuration>
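
Note: with the swagger-codegen and build-helper executions above, the client
stubs are produced during a normal build (the generate goal runs in the
generate-sources phase). Given the <output>, <sourceFolder> and package
settings, the generated tree should look roughly like this (a sketch inferred
from the plugin configuration, not verified codegen output):

    target/generated-sources/main/java/
        org/apache/ambari/infra/client/api/JobsApi.java
        org/apache/ambari/infra/client/invoker/ApiClient.java
        org/apache/ambari/infra/client/invoker/ApiException.java
        org/apache/ambari/infra/client/model/JobExecutionInfoResponse.java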
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
index 0118c76..bce3c7e 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
+++ b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
@@ -18,114 +18,59 @@
  */
 package org.apache.ambari.infra;
 
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.io.IOUtils;
-import org.apache.http.client.ClientProtocolException;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpRequestBase;
-import org.apache.http.client.utils.URIBuilder;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.util.List;
 
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.charset.Charset;
-import java.util.HashMap;
-import java.util.Map;
+import org.apache.ambari.infra.client.api.JobsApi;
+import org.apache.ambari.infra.client.invoker.ApiClient;
+import org.apache.ambari.infra.client.invoker.ApiException;
+import org.apache.ambari.infra.client.model.JobExecutionInfoResponse;
 
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-// TODO: use swagger
-public class InfraClient implements AutoCloseable {
-  private static final Logger LOG = LoggerFactory.getLogger(InfraClient.class);
-
-  private final CloseableHttpClient httpClient;
-  private final URI baseUrl;
+public class InfraClient {
+  private final JobsApi jobsApi;
 
   public InfraClient(String baseUrl) {
+    ApiClient apiClient = new ApiClient().setBasePath(baseUrl);
+    apiClient.setUsername("admin");
+    apiClient.setPassword("admin");
+    this.jobsApi = new JobsApi(apiClient);
+  }
+
+  public List<String> getJobs() {
     try {
-      this.baseUrl = new URI(baseUrl);
-    } catch (URISyntaxException e) {
+      return jobsApi.getAllJobNames();
+    } catch (ApiException e) {
       throw new RuntimeException(e);
     }
-    httpClient = HttpClientBuilder.create().setRetryHandler(new DefaultHttpRequestRetryHandler(0, false)).build();
   }
 
-  @Override
-  public void close() throws Exception {
-    httpClient.close();
-  }
-
-  // TODO: return job data
-  public void getJobs() {
-    execute(new HttpGet(baseUrl));
-  }
-
-  private HttpResponse execute(HttpRequestBase post) {
-    try (CloseableHttpResponse response = httpClient.execute(post)) {
-      String responseBodyText = IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset());
-      int statusCode = response.getStatusLine().getStatusCode();
-      LOG.info("Response code {} body {} ", statusCode, responseBodyText);
-      if (!(200 <= statusCode && statusCode <= 299))
-        throw new RuntimeException("Error while executing http request: " + responseBodyText);
-      return new HttpResponse(statusCode, responseBodyText);
-    } catch (ClientProtocolException e) {
+  public JobExecutionInfoResponse startJob(String jobName, String parameters) {
+    try {
+      return jobsApi.startJob(jobName, parameters);
+    } catch (ApiException e) {
       throw new RuntimeException(e);
-    } catch (IOException e) {
-      throw new UncheckedIOException(e);
     }
   }
 
-  public JobExecutionInfo startJob(String jobName, String parameters) {
-    URIBuilder uriBuilder = new URIBuilder(baseUrl);
-    uriBuilder.setScheme("http");
-    uriBuilder.setPath(uriBuilder.getPath() + "/" + jobName);
-    if (!isBlank(parameters))
-      uriBuilder.addParameter("params", parameters);
+  public void restartJob(String jobName, long jobId) {
     try {
-      String responseText = execute(new HttpPost(uriBuilder.build())).getBody();
-      Map<String, Object> responseContent = new ObjectMapper().readValue(responseText, new TypeReference<HashMap<String,Object>>() {});
-      return new JobExecutionInfo(responseContent.get("jobId").toString(), ((Map)responseContent.get("jobExecutionData")).get("id").toString());
-    } catch (URISyntaxException | JsonParseException | JsonMappingException e) {
+      jobsApi.restartJobInstance(jobName, jobId, "RESTART");
+    } catch (ApiException e) {
       throw new RuntimeException(e);
-    } catch (IOException e) {
-      throw new UncheckedIOException(e);
     }
   }
 
-  public void restartJob(String jobName, String jobId) {
-    URIBuilder uriBuilder = new URIBuilder(baseUrl);
-    uriBuilder.setScheme("http");
-    uriBuilder.setPath(String.format("%s/%s/%s/executions", uriBuilder.getPath(), jobName, jobId));
-    uriBuilder.addParameter("operation", "RESTART");
+  public void stopJob(long jobExecutionId) {
     try {
-      HttpResponse httpResponse = execute(new HttpPost(uriBuilder.build()));
-      if (httpResponse.getCode() != 200)
-        throw new RuntimeException(httpResponse.getBody());
-    } catch (URISyntaxException e) {
+      jobsApi.stopOrAbandonJobExecution(jobExecutionId, "STOP");
+    } catch (ApiException e) {
       throw new RuntimeException(e);
     }
   }
 
-  public void stopJob(String jobExecutionId) {
-    URIBuilder uriBuilder = new URIBuilder(baseUrl);
-    uriBuilder.setScheme("http");
-    uriBuilder.setPath(String.format("%s/executions/%s", uriBuilder.getPath(), jobExecutionId));
-    uriBuilder.addParameter("operation", "STOP");
+  public boolean isRunning(String jobName) {
     try {
-      execute(new HttpDelete(uriBuilder.build()));
-    } catch (URISyntaxException e) {
+      return !jobsApi.getExecutionIdsByJobName(jobName).isEmpty();
+    } catch (ApiException e) {
       throw new RuntimeException(e);
     }
   }
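
Note: the rewritten swagger-based client can be exercised along these lines
(a minimal sketch; the base URL matches the test setup later in this commit,
while the job name and parameter string are illustrative only):

    // base URL as wired up in AbstractInfraSteps (port 61890, /api/v1)
    InfraClient client = new InfraClient("http://localhost:61890/api/v1");
    // job name and parameters here are hypothetical examples
    JobExecutionInfoResponse execution = client.startJob("archive_service_logs", "end=2019-06-24T00:00:00.000Z");
    if (client.isRunning("archive_service_logs"))
      client.stopJob(execution.getJobExecutionId());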
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/S3Client.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/S3Client.java
new file mode 100644
index 0000000..fe4ff7e
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/S3Client.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import io.minio.MinioClient;
+import io.minio.Result;
+import io.minio.messages.Item;
+
+public class S3Client {
+  private final MinioClient s3client;
+  private final String bucket;
+
+  public S3Client(String host, int port, String bucket) {
+    try {
+      s3client = new MinioClient(String.format("http://%s:%d", host, port), "remote-identity", "remote-credential");
+      this.bucket = bucket;
+    }
+    catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  public void createBucket() {
+    try {
+      if (!s3client.bucketExists(bucket))
+        s3client.makeBucket(bucket);
+    }
+    catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  public void putObject(String key, InputStream inputStream, long length) {
+    try {
+      s3client.putObject(bucket, key, inputStream, length, "application/octet-stream");
+    }
+    catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  public void putObject(String key, byte[] bytes) {
+    try (ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes)) {
+      putObject(key, inputStream, bytes.length);
+    } catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
+  }
+
+  public List<String> listObjectKeys() {
+    try {
+      List<String> keys = new ArrayList<>();
+      for (Result<Item> item : s3client.listObjects(bucket)) {
+        keys.add(item.get().objectName());
+      }
+      return keys;
+    }
+    catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  public List<String> listObjectKeys(String text) {
+    try {
+      List<String> keys = new ArrayList<>();
+      for (Result<Item> item : s3client.listObjects(bucket)) {
+        String objectName = item.get().objectName();
+        if (objectName.contains(text))
+          keys.add(objectName);
+      }
+      return keys;
+    }
+    catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  public void deleteObject(String key) {
+    try {
+      s3client.removeObject(bucket, key);
+    }
+    catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  public InputStream getObject(String key) {
+    try {
+      return s3client.getObject(bucket, key);
+    }
+    catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+}
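
Note: a minimal usage sketch for the new MinIO-backed client; host, port and
bucket follow the fake S3 endpoint used by the integration tests, while the
object key is made up for illustration:

    S3Client s3 = new S3Client("localhost", 4569, "testbucket");
    s3.createBucket();
    // hypothetical key; content bytes are streamed to the fake S3 server
    s3.putObject("illustrative-key.json.bz2", "anything".getBytes());
    s3.listObjectKeys(".json.bz2").forEach(System.out::println);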
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
index 1ffdb2a..f149cd8 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
+++ b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
@@ -31,17 +31,17 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
 import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrInputDocument;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class Solr {
-  private static final Logger LOG = LoggerFactory.getLogger(Solr.class);
+  private static final Logger logger = LogManager.getLogger(Solr.class);
   public static final String AUDIT_LOGS_COLLECTION = "audit_logs";
   public static final String HADOOP_LOGS_COLLECTION = "hadoop_logs";
   private static final int SOLR_PORT = 8983;
@@ -55,7 +55,7 @@ public class Solr {
 
   public Solr(String configSetPath) {
     this.configSetPath = configSetPath;
-    this.solrClient = new LBHttpSolrClient.Builder().withBaseSolrUrls(String.format("http://%s:%d/solr/%s_shard1_replica1",
+    this.solrClient = new LBHttpSolrClient.Builder().withBaseSolrUrls(String.format("http://%s:%d/solr/%s_shard1_replica_n1",
             getDockerHost(),
             SOLR_PORT,
             AUDIT_LOGS_COLLECTION)).build();
@@ -85,8 +85,8 @@ public class Solr {
   }
 
   public void createSolrCollection(String collectionName) {
-    LOG.info("Creating collection");
-    runCommand(new String[]{"docker", "exec", "docker_solr_1", "solr", "create_collection", "-force", "-c", collectionName, "-d", Paths.get(configSetPath, "configsets", collectionName, "conf").toString(), "-n", collectionName + "_conf"});
+    logger.info("Creating collection");
+    runCommand(new String[]{"docker", "exec", "solr", "solr", "create_collection", "-force", "-c", collectionName, "-d", Paths.get(configSetPath, "configsets", collectionName, "conf").toString(), "-n", collectionName + "_conf"});
   }
 
   public QueryResponse query(SolrQuery query) {
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
index f48e107..8f19ce9 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
+++ b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
@@ -25,11 +25,11 @@ import java.util.function.BooleanSupplier;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class TestUtil {
-  private static final Logger LOG = LoggerFactory.getLogger(TestUtil.class);
+  private static final Logger logger = LogManager.getLogger(TestUtil.class);
 
   public static void doWithin(int sec, String actionName, BooleanSupplier predicate) {
     doWithin(sec, actionName, () -> {
@@ -54,7 +54,7 @@ public class TestUtil {
         throw new AssertionError(String.format("Unable to perform action '%s' within %d seconds", actionName, sec), exception);
       }
       else {
-        LOG.info("Performing action '{}' failed. retrying...", actionName);
+        logger.info("Performing action '{}' failed. retrying...", actionName);
       }
       try {
         Thread.sleep(1000);
@@ -71,10 +71,10 @@ public class TestUtil {
 
   public static void runCommand(String[] command) {
     try {
-      LOG.info("Exec command: {}", StringUtils.join(command, " "));
+      logger.info("Exec command: {}", StringUtils.join(command, " "));
       Process process = Runtime.getRuntime().exec(command);
       String stdout = IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8);
-      LOG.info("Exec command result {}", stdout);
+      logger.info("Exec command result {}", stdout);
     } catch (Exception e) {
       throw new RuntimeException("Error during execute shell command: ", e);
     }
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
index 3016d67..6f17442 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
+++ b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
@@ -30,14 +30,14 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.ambari.infra.Solr;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class MetricsIT {
-  private static final Logger LOG = LoggerFactory.getLogger(MetricsIT.class);
+  private static final Logger logger = LogManager.getLogger(MetricsIT.class);
 
   private static MockMetricsServer metricsServer;
   private static String shellScriptLocation;
@@ -49,10 +49,10 @@ public class MetricsIT {
 
     // TODO: use the same containers as ambari-infra-manager-it
     shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr-docker-compose.sh";
-    LOG.info("Creating new docker containers for testing Ambari Infra Solr Metrics plugin ...");
+    logger.info("Creating new docker containers for testing Ambari Infra Solr Metrics plugin ...");
     runCommand(new String[]{shellScriptLocation, "start"});
 
-    Solr solr = new Solr("/usr/lib/ambari-infra-solr/server/solr");
+    Solr solr = new Solr();
     solr.waitUntilSolrIsUp();
     solr.createSolrCollection(HADOOP_LOGS_COLLECTION);
 
@@ -61,8 +61,8 @@ public class MetricsIT {
   }
 
   @AfterClass
-  public static void tearDown() throws Exception {
-    LOG.info("shutdown containers");
+  public static void tearDown() {
+    logger.info("shutdown containers");
     runCommand(new String[]{shellScriptLocation, "stop"});
   }
 
@@ -74,10 +74,10 @@ public class MetricsIT {
       Thread.sleep(1000);
       if (currentTimeMillis() - start > 30 * 1000)
         break;
-      LOG.info("Checking any metrics arrived...");
+      logger.info("Checking any metrics arrived...");
     }
 
-    metricsServer.getNotReceivedMetrics().forEach(metric -> LOG.info("Metric not received: {}", metric));
+    metricsServer.getNotReceivedMetrics().forEach(metric -> logger.info("Metric not received: {}", metric));
     assertThat(metricsServer.getNotReceivedMetrics().isEmpty(), is(true));
   }
 
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
index 9d2734f..7197446 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
+++ b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
@@ -27,8 +27,8 @@ import static spark.Spark.post;
 import java.util.Set;
 import java.util.concurrent.ConcurrentSkipListSet;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.google.gson.Gson;
 
@@ -37,7 +37,7 @@ import spark.Response;
 import spark.servlet.SparkApplication;
 
 public class MockMetricsServer implements SparkApplication {
-  private static final Logger LOG = LoggerFactory.getLogger(MockMetricsServer.class);
+  private static final Logger logger = LogManager.getLogger(MockMetricsServer.class);
   private static final String HOST_NAME = "metrics_collector";
 
   private Set<String> expectedMetrics;
@@ -51,14 +51,14 @@ public class MockMetricsServer implements SparkApplication {
   }
 
   private Object queryState(Request request, Response response) {
-    LOG.info("Sending hostname {}", HOST_NAME);
+    logger.info("Sending hostname {}", HOST_NAME);
     response.type("application/json");
     return new Gson().toJson(singletonList(HOST_NAME));
   }
 
   private Object logBody(Request req, Response resp) {
     String body = req.body();
-    LOG.info("Incoming metrics {}", body);
+    logger.info("Incoming metrics {}", body);
 
     expectedMetrics.removeIf(body::contains);
 
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
index f219ce5..985fda5 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
+++ b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
@@ -25,53 +25,45 @@ import static org.apache.ambari.infra.TestUtil.getDockerHost;
 import static org.apache.ambari.infra.TestUtil.runCommand;
 
 import java.io.File;
-import java.io.IOException;
 import java.net.URL;
 import java.time.OffsetDateTime;
 import java.util.Date;
+import java.util.List;
 
 import org.apache.ambari.infra.InfraClient;
+import org.apache.ambari.infra.S3Client;
 import org.apache.ambari.infra.Solr;
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.common.SolrInputDocument;
 import org.jbehave.core.annotations.AfterStories;
 import org.jbehave.core.annotations.BeforeStories;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.ListObjectsRequest;
-import com.amazonaws.services.s3.model.ObjectListing;
+import spark.resource.ClassPathResource;
 
 public abstract class AbstractInfraSteps {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractInfraSteps.class);
+  private static final Logger logger = LogManager.getLogger(AbstractInfraSteps.class);
 
   private static final int INFRA_MANAGER_PORT = 61890;
   private static final int FAKE_S3_PORT = 4569;
-  private static final int HDFS_PORT = 9000;
   protected static final String S3_BUCKET_NAME = "testbucket";
   private String ambariFolder;
   private String shellScriptLocation;
-  private String dockerHost;
-  private AmazonS3Client s3client;
+  private S3Client s3client;
   private int documentId = 0;
   private Solr solr;
+  private InfraClient infraClient;
 
   public InfraClient getInfraClient() {
-    return new InfraClient(String.format("http://%s:%d/api/v1/jobs", dockerHost, INFRA_MANAGER_PORT));
+    return infraClient;
   }
 
   public Solr getSolr() {
     return solr;
   }
 
-  public AmazonS3Client getS3client() {
+  public S3Client getS3client() {
     return s3client;
   }
 
@@ -79,6 +71,10 @@ public abstract class AbstractInfraSteps {
     return ambariFolder + "/ambari-infra/ambari-infra-manager/docker/test-out";
   }
 
+  public String getInfraManagerConfDir() {
+    return ambariFolder + "/ambari-infra/ambari-infra-manager/target/package/conf";
+  }
+
   @BeforeStories
   public void initDockerContainer() throws Exception {
     System.setProperty("HADOOP_USER_NAME", "root");
@@ -86,14 +82,22 @@ public abstract class AbstractInfraSteps {
     URL location = AbstractInfraSteps.class.getProtectionDomain().getCodeSource().getLocation();
     ambariFolder = new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent();
 
-    LOG.info("Clean local data folder {}", getLocalDataFolder());
-    FileUtils.cleanDirectory(new File(getLocalDataFolder()));
+    String localDataFolder = getLocalDataFolder();
+    if (new File(localDataFolder).exists()) {
+      logger.info("Clean local data folder {}", localDataFolder);
+      FileUtils.cleanDirectory(new File(localDataFolder));
+    }
+
+    logger.info("Copy resources");
+    FileUtils.copyDirectory(new ClassPathResource("conf").getFile(), new File(getInfraManagerConfDir()));
 
     shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh";
-    LOG.info("Create new docker container for testing Ambari Infra Manager ...");
+    logger.info("Create new docker container for testing Ambari Infra Manager ...");
     runCommand(new String[]{shellScriptLocation, "start"});
 
-    dockerHost = getDockerHost();
+    String dockerHost = getDockerHost();
+
+    this.infraClient = new InfraClient(String.format("http://%s:%d/api/v1", dockerHost, INFRA_MANAGER_PORT));
 
     solr = new Solr();
     solr.waitUntilSolrIsUp();
@@ -101,22 +105,20 @@ public abstract class AbstractInfraSteps {
     solr.createSolrCollection(AUDIT_LOGS_COLLECTION);
     solr.createSolrCollection(HADOOP_LOGS_COLLECTION);
 
-    LOG.info("Initializing s3 client");
-    s3client = new AmazonS3Client(new BasicAWSCredentials("remote-identity", "remote-credential"));
-    s3client.setEndpoint(String.format("http://%s:%d", dockerHost, FAKE_S3_PORT));
-    s3client.createBucket(S3_BUCKET_NAME);
+    logger.info("Initializing s3 client");
+    s3client = new S3Client(dockerHost, FAKE_S3_PORT, S3_BUCKET_NAME);
+    s3client.createBucket();
 
     checkInfraManagerReachable();
   }
 
-  private void checkInfraManagerReachable() throws Exception {
-    try (InfraClient httpClient = getInfraClient()) {
-      doWithin(30, "Start Ambari Infra Manager", httpClient::getJobs);
-      LOG.info("Ambari Infra Manager is up and running");
-    }
+  private void checkInfraManagerReachable() {
+    InfraClient infraClient = getInfraClient();
+    doWithin(30, "Start Ambari Infra Manager", infraClient::getJobs);
+    logger.info("Ambari Infra Manager is up and running");
   }
 
-  protected void addDocument(OffsetDateTime logtime) {
+  protected SolrInputDocument addDocument(OffsetDateTime logtime) {
     SolrInputDocument solrInputDocument = new SolrInputDocument();
     solrInputDocument.addField("logType", "HDFSAudit");
     solrInputDocument.addField("cluster", "cl1");
@@ -139,7 +141,6 @@ public abstract class AbstractInfraSteps {
     solrInputDocument.addField("level", "INFO");
     solrInputDocument.addField("resource", "/ats/active");
     solrInputDocument.addField("ip", "172.18.0.2");
-    solrInputDocument.addField("evtTime", "2017-12-08T10:23:16.452Z");
     solrInputDocument.addField("req_caller_id", "HIVE_QUERY_ID:ambari-qa_20160317200111_223b3079-4a2d-431c-920f-6ba37ed63e9f");
     solrInputDocument.addField("repoType", 1);
     solrInputDocument.addField("enforcer", "hadoop-acl");
@@ -147,37 +148,21 @@ public abstract class AbstractInfraSteps {
     solrInputDocument.addField("message_md5", "-6778765776916226588");
     solrInputDocument.addField("event_md5", "5627261521757462732");
     solrInputDocument.addField("logtime", new Date(logtime.toInstant().toEpochMilli()));
+    solrInputDocument.addField("evtTime", new Date(logtime.toInstant().toEpochMilli()));
     solrInputDocument.addField("_ttl_", "+7DAYS");
-    solrInputDocument.addField("_expire_at_", "2017-12-15T10:23:19.106Z");
+    solrInputDocument.addField("_expire_at_", new Date(logtime.plusDays(7).toInstant().toEpochMilli()));
     solr.add(solrInputDocument);
+    return solrInputDocument;
   }
 
   @AfterStories
   public void shutdownContainers() throws Exception {
     Thread.sleep(2000); // sync with s3 server
-    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
-    ObjectListing objectListing = getS3client().listObjects(listObjectsRequest);
-    LOG.info("Found {} files on s3.", objectListing.getObjectSummaries().size());
-    objectListing.getObjectSummaries().forEach(s3ObjectSummary ->  LOG.info("Found file on s3 with key {}", s3ObjectSummary.getKey()));
-
-    LOG.info("Listing files on hdfs.");
-    try (FileSystem fileSystem = getHdfs()) {
-      int count = 0;
-      RemoteIterator<LocatedFileStatus> it = fileSystem.listFiles(new Path("/test_audit_logs"), true);
-      while (it.hasNext()) {
-        LOG.info("Found file on hdfs with name {}", it.next().getPath().getName());
-        ++count;
-      }
-      LOG.info("{} files found on hfds", count);
-    }
+    List<String> objectKeys = getS3client().listObjectKeys();
+    logger.info("Found {} files on s3.", objectKeys.size());
+    objectKeys.forEach(objectKey ->  logger.info("Found file on s3 with key {}", objectKey));
 
-    LOG.info("shutdown containers");
+    logger.info("shutdown containers");
     runCommand(new String[]{shellScriptLocation, "stop"});
   }
-
-  protected FileSystem getHdfs() throws IOException {
-    Configuration conf = new Configuration();
-    conf.set("fs.defaultFS", String.format("hdfs://%s:%d/", dockerHost, HDFS_PORT));
-    return FileSystem.get(conf);
-  }
 }
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
index d84c23f..0825290 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
+++ b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
@@ -18,53 +18,54 @@
  */
 package org.apache.ambari.infra.steps;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.Objects.requireNonNull;
 import static org.apache.ambari.infra.OffsetDateTimeConverter.SOLR_DATETIME_FORMATTER;
 import static org.apache.ambari.infra.TestUtil.doWithin;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.hasProperty;
 import static org.hamcrest.core.IsCollectionContaining.hasItem;
 import static org.junit.Assert.assertThat;
 
-import java.io.ByteArrayInputStream;
+import java.io.BufferedReader;
 import java.io.File;
-import java.io.IOException;
-import java.io.UncheckedIOException;
+import java.io.InputStreamReader;
 import java.time.Duration;
 import java.time.OffsetDateTime;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.ambari.infra.InfraClient;
-import org.apache.ambari.infra.JobExecutionInfo;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.ambari.infra.S3Client;
+import org.apache.ambari.infra.client.model.JobExecutionInfoResponse;
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
+import org.jbehave.core.annotations.AfterScenario;
 import org.jbehave.core.annotations.Given;
 import org.jbehave.core.annotations.Then;
 import org.jbehave.core.annotations.When;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.ListObjectsRequest;
-import com.amazonaws.services.s3.model.ObjectListing;
-import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class ExportJobsSteps extends AbstractInfraSteps {
-  private static final Logger LOG = LoggerFactory.getLogger(ExportJobsSteps.class);
+  private static final Logger logger = LogManager.getLogger(ExportJobsSteps.class);
+  private Set<String> documentIds = new HashSet<>();
 
-  private Map<String, JobExecutionInfo> launchedJobs = new HashMap<>();
+  private Map<String, JobExecutionInfoResponse> launchedJobs = new HashMap<>();
 
   @Given("$count documents in solr")
   public void addDocuments(int count) {
     OffsetDateTime intervalEnd = OffsetDateTime.now();
+    documentIds.clear();
     for (int i = 0; i < count; ++i) {
-      addDocument(intervalEnd.minusMinutes(i % (count / 10)));
+      documentIds.add(addDocument(intervalEnd.minusMinutes(i % (count / 10))).get("id").getValue().toString());
     }
     getSolr().commit();
   }
@@ -73,16 +74,16 @@ public class ExportJobsSteps extends AbstractInfraSteps {
   public void addDocuments(long count, OffsetDateTime startLogtime, OffsetDateTime endLogtime) {
     Duration duration = Duration.between(startLogtime, endLogtime);
     long increment = duration.toNanos() / count;
-    for (int i = 0; i < count; ++i)
-      addDocument(startLogtime.plusNanos(increment * i));
+    documentIds.clear();
+    for (int i = 0; i < count; ++i) {
+      documentIds.add(addDocument(startLogtime.plusNanos(increment * i)).get("id").getValue().toString());
+    }
     getSolr().commit();
   }
 
   @Given("a file on s3 with key $key")
-  public void addFileToS3(String key) throws Exception {
-    try (ByteArrayInputStream inputStream = new ByteArrayInputStream("anything".getBytes())) {
-      getS3client().putObject(S3_BUCKET_NAME, key, inputStream, new ObjectMetadata());
-    }
+  public void addFileToS3(String key) {
+    getS3client().putObject(key, "anything".getBytes());
   }
 
   @When("start $jobName job")
@@ -93,72 +94,51 @@ public class ExportJobsSteps extends AbstractInfraSteps {
   @When("start $jobName job with parameters $parameters after $waitSec seconds")
   public void startJob(String jobName, String parameters, int waitSec) throws Exception {
     Thread.sleep(waitSec * 1000);
-    try (InfraClient httpClient = getInfraClient()) {
-      JobExecutionInfo jobExecutionInfo = httpClient.startJob(jobName, parameters);
-      LOG.info("Job {} started: {}", jobName, jobExecutionInfo);
-      launchedJobs.put(jobName, jobExecutionInfo);
-    }
+    JobExecutionInfoResponse jobExecutionInfo = getInfraClient().startJob(jobName, parameters);
+    logger.info("Job {} started: {}", jobName, jobExecutionInfo);
+    launchedJobs.put(jobName, jobExecutionInfo);
   }
 
   @When("restart $jobName job within $waitSec seconds")
   public void restartJob(String jobName, int waitSec) {
-    doWithin(waitSec, "Restarting job " + jobName, () -> {
-      try (InfraClient httpClient = getInfraClient()) {
-        httpClient.restartJob(jobName, launchedJobs.get(jobName).getJobId());
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-    });
+    doWithin(waitSec, "Restarting job " + jobName, () ->
+            getInfraClient().restartJob(jobName, launchedJobs.get(jobName).getJobInstanceId()));
   }
 
   @When("stop job $jobName after at least $count file exists in s3 with filename containing text $text within $waitSec seconds")
   public void stopJob(String jobName, int count, String text, int waitSec) throws Exception {
-    AmazonS3Client s3Client = getS3client();
-    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
-    doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME)
-            && fileCountOnS3(text, s3Client, listObjectsRequest) > count);
-
-    try (InfraClient httpClient = getInfraClient()) {
-      httpClient.stopJob(launchedJobs.get(jobName).getExecutionId());
-    }
+    S3Client s3Client = getS3client();
+    doWithin(waitSec, "check uploaded files to s3", () -> s3Client.listObjectKeys(text).size() > count);
+    InfraClient infraClient = getInfraClient();
+    infraClient.stopJob(launchedJobs.get(jobName).getJobExecutionId());
+    doWithin(waitSec, String.format("Wait for job %s stops", jobName), () -> infraClient.isRunning(jobName));
   }
 
   @When("delete file with key $key from s3")
   public void deleteFileFromS3(String key) {
-    getS3client().deleteObject(S3_BUCKET_NAME, key);
+    getS3client().deleteObject(key);
   }
 
   @Then("Check filenames contains the text $text on s3 server after $waitSec seconds")
   public void checkS3After(String text, int waitSec) {
-    AmazonS3Client s3Client = getS3client();
-    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
-    doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME)
-            && !s3Client.listObjects(listObjectsRequest).getObjectSummaries().isEmpty());
+    S3Client s3Client = getS3client();
+    doWithin(waitSec, "check uploaded files to s3", () -> !s3Client.listObjectKeys().isEmpty());
 
-    ObjectListing objectListing = s3Client.listObjects(listObjectsRequest);
-    assertThat(objectListing.getObjectSummaries(), hasItem(hasProperty("key", containsString(text))));
+    List<String> objectKeys = s3Client.listObjectKeys(text);
+    assertThat(objectKeys, hasItem(containsString(text)));
   }
 
   @Then("Check $count files exists on s3 server with filenames containing the text $text after $waitSec seconds")
   public void checkNumberOfFilesOnS3(long count, String text, int waitSec) {
-    AmazonS3Client s3Client = getS3client();
-    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
-    doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME)
-            && fileCountOnS3(text, s3Client, listObjectsRequest) == count);
-  }
-
-  private long fileCountOnS3(String text, AmazonS3Client s3Client, ListObjectsRequest listObjectsRequest) {
-    return s3Client.listObjects(listObjectsRequest).getObjectSummaries().stream()
-    .filter(s3ObjectSummary -> s3ObjectSummary.getKey().contains(text))
-    .count();
+    S3Client s3Client = getS3client();
+    doWithin(waitSec, "check uploaded files to s3", () -> s3Client.listObjectKeys(text).size() == count);
   }
 
   @Then("Less than $count files exists on s3 server with filenames containing the text $text after $waitSec seconds")
   public void checkLessThanFileExistsOnS3(long count, String text, int waitSec) {
-    AmazonS3Client s3Client = getS3client();
-    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
-    doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME) && between(
-            fileCountOnS3(text, s3Client, listObjectsRequest), 1L, count - 1L));
+    S3Client s3Client = getS3client();
+    doWithin(waitSec, "check uploaded files to s3", () -> between(
+            s3Client.listObjectKeys(text).size(), 1L, count - 1L));
   }
 
   private boolean between(long count, long from, long to) {
@@ -167,10 +147,9 @@ public class ExportJobsSteps extends AbstractInfraSteps {
 
   @Then("No file exists on s3 server with filenames containing the text $text")
   public void fileNotExistOnS3(String text) {
-    AmazonS3Client s3Client = getS3client();
-    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
-    assertThat(s3Client.listObjects(listObjectsRequest).getObjectSummaries().stream()
-            .anyMatch(s3ObjectSummary -> s3ObjectSummary.getKey().contains(text)), is(false));
+    S3Client s3Client = getS3client();
+    assertThat(s3Client.listObjectKeys().stream()
+            .anyMatch(objectKey -> objectKey.contains(text)), is(false));
   }
 
   @Then("solr contains $count documents between $startLogtime and $endLogtime")
@@ -193,30 +172,10 @@ public class ExportJobsSteps extends AbstractInfraSteps {
     return getSolr().query(query).getResults().isEmpty();
   }
 
-  @Then("Check $count files exists on hdfs with filenames containing the text $text in the folder $path after $waitSec seconds")
-  public void checkNumberOfFilesOnHdfs(int count, String text, String path, int waitSec) throws Exception {
-    try (FileSystem fileSystem = getHdfs()) {
-      doWithin(waitSec, "check uploaded files to hdfs", () -> {
-        try {
-          int fileCount = 0;
-          RemoteIterator<LocatedFileStatus> it = fileSystem.listFiles(new Path(path), true);
-          while (it.hasNext()) {
-            if (it.next().getPath().getName().contains(text))
-              ++fileCount;
-          }
-          return fileCount == count;
-        }
-        catch (IOException e) {
-          throw new UncheckedIOException(e);
-        }
-      });
-    }
-  }
-
   @Then("Check $count files exists on local filesystem with filenames containing the text $text in the folder $path for job $jobName")
   public void checkNumberOfFilesOnLocalFilesystem(long count, String text, String path, String jobName) {
-    File destinationDirectory = new File(getLocalDataFolder(), path.replace("${jobId}", launchedJobs.get(jobName).getJobId()));
-    LOG.info("Destination directory path: {}", destinationDirectory.getAbsolutePath());
+    File destinationDirectory = new File(getLocalDataFolder(), path.replace("${jobId}", Long.toString(launchedJobs.get(jobName).getJobInstanceId())));
+    logger.info("Destination directory path: {}", destinationDirectory.getAbsolutePath());
     doWithin(5, "Destination directory exists", destinationDirectory::exists);
 
     File[] files = requireNonNull(destinationDirectory.listFiles(),
@@ -225,4 +184,45 @@ public class ExportJobsSteps extends AbstractInfraSteps {
             .filter(file -> file.getName().contains(text))
             .count(), is(count));
   }
+
+  private static final ObjectMapper json = new ObjectMapper();
+
+  @Then("Check the files $fileNamePart contains the archived documents")
+  public void checkStoredDocumentIds(String fileNamePart) throws Exception {
+    S3Client s3Client = getS3client();
+    int size = documentIds.size();
+    Set<String> storedDocumentIds = new HashSet<>();
+    for (String objectKey : s3Client.listObjectKeys(fileNamePart)) {
+      try (BufferedReader reader = new BufferedReader(new InputStreamReader(new BZip2CompressorInputStream(s3Client.getObject(objectKey)), UTF_8))) {
+        String line;
+        while ((line = reader.readLine()) != null) {
+          Map<String, Object> document = json.readValue(line, new TypeReference<HashMap<String, Object>>() {});
+          String id = document.get("id").toString();
+          storedDocumentIds.add(id);
+          documentIds.remove(id);
+        }
+      }
+    }
+    assertThat(documentIds.size(), is(0));
+    assertThat(storedDocumentIds.size(), is(size));
+  }
+
+  @AfterScenario
+  public void waitForJobStops() throws InterruptedException {
+    InfraClient infraClient = getInfraClient();
+    doWithin(20, "Stop all launched jobs", () -> {
+      int runningJobCount = 0;
+      for (String jobName : launchedJobs.keySet()) {
+        if (launchedJobs.get(jobName) == null)
+          continue;
+        if (!infraClient.isRunning(jobName)) {
+          launchedJobs.put(jobName, null);
+        }
+        else {
+          ++runningJobCount;
+        }
+      }
+      return runningJobCount == 0;
+    });
+  }
 }
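
Note: given the step annotations above, a scenario in infra_api_tests.story
binds to these methods along these lines (an illustrative fragment with
made-up job name, counts and key fragments, not the actual story file):

    Given 1000 documents in solr
    When start archive_service_logs job
    Then Check filenames contains the text solr_archive on s3 server after 20 seconds
    Then Check the files solr_archive contains the archived documents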
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/resources/conf/core-site.xml b/ambari-infra/ambari-infra-manager-it/src/test/resources/conf/core-site.xml
new file mode 100644
index 0000000..1148c85
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager-it/src/test/resources/conf/core-site.xml
@@ -0,0 +1,42 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>fs.defaultFS</name>
+    <value>s3a://testbucket</value>
+  </property>
+  <property>
+    <name>fs.s3a.endpoint</name>
+    <value>http://fakes3:4569</value>
+  </property>
+  <property>
+    <name>fs.s3a.access.key</name>
+    <value>MyAccessKey</value>
+  </property>
+  <property>
+    <name>fs.s3a.secret.key</name>
+    <value>MySecretKey</value>
+  </property>
+  <property>
+    <name>fs.s3a.path.style.access</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>fs.s3a.multiobjectdelete.enable</name>
+    <value>false</value>
+  </property>
+</configuration>
\ No newline at end of file
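
This core-site.xml points the Hadoop FileSystem API at the localstack S3 endpoint, so the integration tests reach the fake bucket through the ordinary s3a:// scheme with path-style access. Assuming the file is on the test classpath, a client obtains the filesystem in the usual Hadoop way; the smoke-test class below is a sketch, and /archives/audit_logs is simply the prefix the test jobs write to:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class S3aSmokeTest {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();      // picks up core-site.xml from the classpath
        try (FileSystem fs = FileSystem.get(conf)) {   // fs.defaultFS=s3a://testbucket resolves to S3AFileSystem
          System.out.println(fs.exists(new Path("/archives/audit_logs")));
        }
      }
    }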
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/resources/conf/hdfs-site.xml b/ambari-infra/ambari-infra-manager-it/src/test/resources/conf/hdfs-site.xml
new file mode 100644
index 0000000..b529b91
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager-it/src/test/resources/conf/hdfs-site.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property><name>dfs.namenode.rpc-address</name><value>namenode:9000</value></property>
+  <property><name>dfs.replication</name><value>1</value></property>
+</configuration>
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties b/ambari-infra/ambari-infra-manager-it/src/test/resources/conf/infra-manager.properties
similarity index 61%
copy from ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
copy to ambari-infra/ambari-infra-manager-it/src/test/resources/conf/infra-manager.properties
index a0712ba..586a0fa 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
+++ b/ambari-infra/ambari-infra-manager-it/src/test/resources/conf/infra-manager.properties
@@ -17,10 +17,10 @@ infra-manager.batch.db.file=job-repository.db
 infra-manager.batch.db.init=false
 infra-manager.batch.db.username=admin
 infra-manager.batch.db.password=admin
-management.security.enabled=false
-management.health.solr.enabled=false
 infra-manager.server.data.folder=/tmp/ambariInfraManager
+infra-manager.admin-user.password=admin
 
+# Archive Service Logs
 infra-manager.jobs.solr_data_archiving.archive_service_logs.enabled=true
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.zoo_keeper_connection_string=zookeeper:2181
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.collection=hadoop_logs
@@ -28,47 +28,39 @@ infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.query_text=logt
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[0]=logtime
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=id
+infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
 infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=100
 infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=150
-infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=LOCAL
-infra-manager.jobs.solr_data_archiving.archive_service_logs.local_destination_directory=/tmp/ambariInfraManager
+infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=HDFS
+infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_destination_directory=/archives/service_logs
+infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_file_permission=644
 infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_column=logtime
 infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
-infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.enabled=true
+infra-manager.jobs.solr_data_archiving.archive_service_logs.ttl=PT24H
+infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.enabled=false
 infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.cron=0 * * * * ?
-infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.intervalEndDelta=PT24H
+# Archive Audit Logs
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.enabled=true
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.collection=audit_logs
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=logtime:[${start} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=logtime
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=evtTime:[${start} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(evtTime:${evtTime} AND id:{${id} TO *]) OR evtTime:{${evtTime} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=evtTime
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[1]=id
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=evtTime:[${start.evtTime} TO ${end.evtTime}} OR (evtTime:${end.evtTime} AND id:[* TO ${end.id}])
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.read_block_size=100
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.write_block_size=150
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=S3
-# TODO: logtime may not be enough: The same filename can be generated when more than write_block_size count docs has the same logtime value
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=logtime
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=HDFS
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=evtTime
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_endpoint=hdfs://namenode:9000/
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/test_audit_logs
-#infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_access_file=<any>.csv
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_key_prefix=solr_archive_
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_bucket_name=testbucket
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_endpoint=http://fakes3:4569
-# TODO: configure ranger audit logs
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.zoo_keeper_connection_string=zookeeper:2181
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.read_block_size=100
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.write_block_size=150
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.file_name_suffix_column=logtime
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.destination_directory_path=/tmp/ambariInfraManager
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.collection=hadoop_logs
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.query_text=logtime:[* TO "${end}"]
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.filter_query_text=(logtime:"${logtime}" AND id:{"${id}" TO *]) OR logtime:{"${logtime}" TO "${end}"]
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[0]=logtime
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[1]=id
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/archives/audit_logs
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_file_permission=644
+# Delete Audit Logs
 infra-manager.jobs.solr_data_deleting.delete_audit_logs.enabled=true
 infra-manager.jobs.solr_data_deleting.delete_audit_logs.zoo_keeper_connection_string=zookeeper:2181
 infra-manager.jobs.solr_data_deleting.delete_audit_logs.collection=audit_logs
-infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=logtime
+infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=evtTime
+# Job data clean up
+infra-manager.jobs.clean-up.ttl=PT240H
+infra-manager.jobs.clean-up.scheduling.enabled=true
+infra-manager.jobs.clean-up.scheduling.cron=0 * * * * ?
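
When an archiving run finishes, the job removes the exported range from Solr using delete_query_text; the ${start.evtTime}, ${end.evtTime} and ${end.id} placeholders are filled from the first and last archived documents. In plain SolrJ terms the resulting call amounts to roughly the sketch below; the boundary values are made up, and the real job builds the full query string (including the id boundary clause) from its own parameters:

    import java.util.Optional;

    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    import static java.util.Collections.singletonList;

    public class DeleteArchivedRange {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient solr = new CloudSolrClient.Builder(
                singletonList("zookeeper:2181"), Optional.empty()).build()) {
          // The range part of delete_query_text with placeholders substituted;
          // the exclusive upper bound '}' leaves documents at end.evtTime to the id clause.
          solr.deleteByQuery("audit_logs",
              "evtTime:[2010-10-09T05:00:00.000Z TO 2010-10-09T20:00:00.000Z}");
          solr.commit("audit_logs");
        }
      }
    }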
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties b/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j2-test.properties
similarity index 57%
rename from ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties
rename to ambari-infra/ambari-infra-manager-it/src/test/resources/log4j2-test.properties
index 956bc63..4e488fd 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties
+++ b/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j2-test.properties
@@ -9,8 +9,17 @@
 #   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 #   See the License for the specific language governing permissions and
 #   limitations under the License.
-log4j.rootLogger=INFO, stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.Target=System.out
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
\ No newline at end of file
+
+status = error
+dest = err
+name = PropertiesConfig
+
+appender.console.type = Console
+appender.console.name = STDOUT
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+appender.console.filter.threshold.type = ThresholdFilter
+appender.console.filter.threshold.level = info
+
+rootLogger.level = info
+rootLogger.appenderRef.stdout.ref = STDOUT
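
The rename switches the integration tests from Log4j 1.x to the Log4j 2 properties format; %c{1}:%L in the pattern renders the simple logger name and the call's line number. Test code then logs through the Log4j 2 API (the LOG-to-logger change in ExportJobsSteps above matches this migration); a minimal sketch:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class LoggingExample {
      private static final Logger logger = LogManager.getLogger(LoggingExample.class);

      public static void main(String[] args) {
        // With the pattern above this renders as, e.g.:
        // 2019-06-24 10:24:12 INFO  LoggingExample:10 - Destination directory path: /tmp/ambariInfraManager
        logger.info("Destination directory path: {}", "/tmp/ambariInfraManager");
      }
    }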
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story b/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
index 122a634..bfa9e4b 100644
--- a/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
+++ b/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
@@ -9,29 +9,31 @@ Scenario: Exporting 10 documents using writeBlockSize=3 produces 4 files
 
 Given 10 documents in solr with logtime from 2010-10-09T05:00:00.000Z to 2010-10-09T20:00:00.000Z
 When start archive_audit_logs job with parameters writeBlockSize=3,start=2010-10-09T00:00:00.000Z,end=2010-10-11T00:00:00.000Z after 2 seconds
-Then Check 4 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2010-10-09 after 20 seconds
+Then Check 4 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2010-10-09 after 20 seconds
 And solr does not contain documents between 2010-10-09T05:00:00.000Z and 2010-10-09T20:00:00.000Z after 5 seconds
+And Check the files archives/audit_logs/audit_logs_-_2010-10-09 contains the archived documents
 
 
 Scenario: Running archiving job with a bigger start value than end value exports and deletes 0 documents
 
 Given 10 documents in solr with logtime from 2010-01-01T05:00:00.000Z to 2010-01-04T05:00:00.000Z
 When start archive_audit_logs job with parameters writeBlockSize=3,start=2010-01-03T05:00:00.000Z,end=2010-01-02T05:00:00.000Z after 2 seconds
-Then No file exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2010-01-0
+Then No file exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2010-01-0
 And solr contains 10 documents between 2010-01-01T05:00:00.000Z and 2010-01-04T05:00:00.000Z
 
 
 Scenario: Archiving job fails when part of the data is exported. After resolving the issue and restarting the job exports the rest of the data.
 
 Given 200 documents in solr with logtime from 2011-10-09T05:00:00.000Z to 2011-10-09T20:00:00.000Z
-And a file on s3 with key solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.tar.gz
+And a file on s3 with key archives/audit_logs/audit_logs_-_2011-10-09T08-00-00.000Z.json.bz2
 When start archive_audit_logs job with parameters writeBlockSize=20,start=2010-11-09T00:00:00.000Z,end=2011-10-11T00:00:00.000Z after 2 seconds
-Then Check 3 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2011-10-09 after 20 seconds
+Then Check 3 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2011-10-09 after 20 seconds
 And solr does not contain documents between 2011-10-09T05:00:00.000Z and 2011-10-09T07:59:59.999Z after 5 seconds
-When delete file with key solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.tar.gz from s3
+When delete file with key archives/audit_logs/audit_logs_-_2011-10-09T08-00-00.000Z.json.bz2 from s3
 And restart archive_audit_logs job within 2 seconds
-Then Check 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2011-10-09 after 20 seconds
+Then Check 10 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2011-10-09 after 20 seconds
 And solr does not contain documents between 2011-10-09T05:00:00.000Z and 2011-10-09T20:00:00.000Z after 5 seconds
+And Check the files archives/audit_logs/audit_logs_-_2011-10-09 contains the archived documents
 
 
 Scenario: After Deleting job deletes documents from solr no document found in the specified interval
@@ -41,14 +43,6 @@ When start delete_audit_logs job with parameters start=2012-10-09T05:00:00.000Z,
 Then solr does not contain documents between 2012-10-09T05:00:00.000Z and 2012-10-09T20:00:00.000Z after 5 seconds
 
 
-Scenario: Archiving documents to hdfs
-
-Given 1000 documents in solr with logtime from 2014-01-04T05:00:00.000Z to 2014-01-06T20:00:00.000Z
-When start archive_audit_logs job with parameters start=2014-01-04T05:00:00.000Z,end=2014-01-06T20:00:00.000Z,destination=HDFS after 2 seconds
-Then Check 7 files exists on hdfs with filenames containing the text audit_logs_-_2014-01-0 in the folder /test_audit_logs after 10 seconds
-And solr does not contain documents between 2014-01-04T05:00:00.000Z and 2014-01-06T20:00:00.000Z after 10 seconds
-
-
 Scenario: Archiving documents to local filesystem
 
 Given 200 documents in solr with logtime from 2014-02-04T05:00:00.000Z to 2014-02-06T20:00:00.000Z
@@ -59,9 +53,10 @@ And solr does not contain documents between 2014-02-04T05:00:00.000Z and 2014-02
 
 Scenario: Launch Archiving job. Initiate stop and check that part of the data is archived. After restart all data must be extracted.
 
-Given 200 documents in solr with logtime from 2014-03-09T05:00:00.000Z to 2014-03-09T20:00:00.000Z
+Given 500 documents in solr with logtime from 2014-03-09T05:00:00.000Z to 2014-03-09T20:00:00.000Z
 When start archive_audit_logs job with parameters writeBlockSize=20,start=2014-03-09T05:00:00.000Z,end=2014-03-09T20:00:00.000Z after 2 seconds
-And stop job archive_audit_logs after at least 1 file exists in s3 with filename containing text solr_archive_audit_logs_-_2014-03-09 within 10 seconds
-Then Less than 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2014-03-09 after 20 seconds
+And stop job archive_audit_logs after at least 1 file exists in s3 with filename containing text archives/audit_logs/audit_logs_-_2014-03-09 within 10 seconds
+Then Less than 20 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2014-03-09 after 20 seconds
 When restart archive_audit_logs job within 10 seconds
-Then Check 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2014-03-09 after 20 seconds
+Then Check 25 files exists on s3 server with filenames containing the text archives/audit_logs/audit_logs_-_2014-03-09 after 20 seconds
+And Check the files archives/audit_logs/audit_logs_-_2014-03-09 contains the archived documents
\ No newline at end of file
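
These scenarios are JBehave stories: each plain-text step binds to a method annotated with a matching pattern, and the $-placeholders become method arguments, exactly as in ExportJobsSteps above. A stripped-down sketch of such a binding (the class, method and body here are illustrative, not the project's code):

    import org.jbehave.core.annotations.Then;

    public class ArchiveCheckSteps {

      @Then("Check $count files exists on s3 server with filenames containing the text $text after $waitSec seconds")
      public void checkFilesOnS3(int count, String text, int waitSec) {
        // Poll the S3 client until exactly `count` object keys contain `text`,
        // failing the scenario if that does not happen within `waitSec` seconds.
      }
    }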
diff --git a/ambari-infra/ambari-infra-manager/.gitignore b/ambari-infra/ambari-infra-manager/.gitignore
index 94b3829..dd3661e 100644
--- a/ambari-infra/ambari-infra-manager/.gitignore
+++ b/ambari-infra/ambari-infra-manager/.gitignore
@@ -2,4 +2,7 @@ out/*
 *.pid
 Profile
 .env
-test-out
\ No newline at end of file
+test-out
+test.db
+**/swagger/swagger.json
+**/swagger/swagger.yaml
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/docker/Dockerfile b/ambari-infra/ambari-infra-manager/docker/Dockerfile
index eaefe95..4d039c8 100644
--- a/ambari-infra/ambari-infra-manager/docker/Dockerfile
+++ b/ambari-infra/ambari-infra-manager/docker/Dockerfile
@@ -22,10 +22,11 @@ RUN yum -y install glibc-common
 ENV HOME /root
 
 #Install JAVA
-ENV JAVA_VERSION 8u131
-ENV BUILD_VERSION b11
-RUN wget --no-check-certificate --no-cookies --header "Cookie:oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION-$BUILD_VERSION/d54c1d3a095b4ff2b6607d096fa80163/jdk-$JAVA_VERSION-linux-x64.rpm -O jdk-8-linux-x64.rpm
-RUN rpm -ivh jdk-8-linux-x64.rpm
+ENV JAVA_VERSION 11.0.1
+ENV BUILD_VERSION 13
+
+RUN wget --no-check-certificate --no-cookies --header "Cookie:oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION+$BUILD_VERSION/90cf5d8f270a4347a95050320eef3fb7/jdk-${JAVA_VERSION}_linux-x64_bin.rpm -O jdk-11-linux-x64.rpm
+RUN rpm -ivh jdk-11-linux-x64.rpm
 ENV JAVA_HOME /usr/java/default/
 
 #Install Maven
diff --git a/ambari-infra/ambari-infra-manager/docker/docker-compose.yml b/ambari-infra/ambari-infra-manager/docker/docker-compose.yml
index 602835e..8983886 100644
--- a/ambari-infra/ambari-infra-manager/docker/docker-compose.yml
+++ b/ambari-infra/ambari-infra-manager/docker/docker-compose.yml
@@ -15,6 +15,7 @@
 version: '3.3'
 services:
   zookeeper:
+    container_name: zookeeper
     image: zookeeper:${ZOOKEEPER_VERSION:-3.4.10}
     restart: always
     hostname: zookeeper
@@ -28,6 +29,7 @@ services:
   solr:
 #  TODO: use infra-solr
     image: solr:${SOLR_VERSION:-7.7.0}
+    container_name: solr
     restart: always
     hostname: solr
     ports:
@@ -45,14 +47,16 @@ services:
       - "-z"
       - ${ZOOKEEPER_CONNECTION_STRING}
     volumes:
-      - $AMBARI_LOCATION/ambari-logsearch/ambari-logsearch-server/src/main/configsets:/opt/solr/configsets
+      - $AMBARI_INFRA_LOCATION/ambari-infra-manager/docker/configsets:/opt/solr/configsets
   fakes3:
+    container_name: fakes3
     image: localstack/localstack
     hostname: fakes3
     ports:
       - "4569:4569"
     environment:
       - SERVICES=s3:4569
+      - DEBUG=s3
     networks:
       infra-network:
         aliases:
@@ -60,6 +64,7 @@ services:
     env_file:
       - Profile
   namenode:
+    container_name: hdfs_namenode
     image: flokkr/hadoop-hdfs-namenode:${HADOOP_VERSION:-3.0.0}
     hostname: namenode
     ports:
@@ -72,6 +77,7 @@ services:
     networks:
       - infra-network
   datanode:
+    container_name: hdfs_datanode
     image: flokkr/hadoop-hdfs-datanode:${HADOOP_VERSION:-3.0.0}
     links:
       - namenode
@@ -80,6 +86,7 @@ services:
     networks:
       - infra-network
   inframanager:
+    container_name: infra_manager
     image: ambari-infra-manager:v1.0
     restart: always
     hostname: infra-manager.apache.org
@@ -91,13 +98,13 @@ services:
       - 61890:61890
       - 5007:5007
     environment:
-      COMPONENT: infra-manager
-      COMPONENT_LOG: infra-manager
+      COMPONENT: infra_manager
+      COMPONENT_LOG: infra_manager
       ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING}
       DISPLAY: $DOCKERIP:0
     volumes:
-      - $AMBARI_LOCATION/ambari-infra/ambari-infra-manager/target/package:/root/ambari-infra-manager
-      - $AMBARI_LOCATION/ambari-infra/ambari-infra-manager/docker/test-out:/root/archive
+      - $AMBARI_INFRA_LOCATION/ambari-infra-manager/target/package:/root/ambari-infra-manager
+      - $AMBARI_INFRA_LOCATION/ambari-infra-manager/docker/test-out:/root/archive
 networks:
   infra-network:
     driver: bridge
diff --git a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh
index d32a64d..094a3d4 100755
--- a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh
+++ b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh
@@ -61,13 +61,13 @@ function check_env_file() {
 
 function setup_env() {
   pushd $sdir/../../
-  local AMBARI_LOCATION=$(pwd)
+  local AMBARI_INFRA_LOCATION=$(pwd)
   popd
   local docker_ip=$(get_docker_ip)
   cat << EOF > $sdir/.env
 DOCKERIP=$docker_ip
 MAVEN_REPOSITORY_LOCATION=$HOME/.m2
-AMBARI_LOCATION=$AMBARI_LOCATION
+AMBARI_INFRA_LOCATION=$AMBARI_INFRA_LOCATION
 
 ZOOKEEPER_VERSION=3.4.10
 ZOOKEEPER_CONNECTION_STRING=zookeeper:2181
diff --git a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh
index 87d6b8a..31b9e21 100755
--- a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh
+++ b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh
@@ -41,10 +41,10 @@ function start_infra_manager_container() {
  local AMBARI_INFRA_MANAGER_LOCATION=$(pwd)
  popd
  kill_infra_manager_container
- docker run -d --name infra-manager --hostname infra-manager.apache.org \
+ docker run -d --name infra_manager --hostname infra-manager.apache.org \
    -v $AMBARI_INFRA_MANAGER_LOCATION/target/package:/root/ambari-infra-manager -p 61890:61890 -p 5007:5007 \
    ambari-infra-manager:v1.0
-  ip_address=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' logsearch)
+  ip_address=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' infra_manager)
   echo "Ambari Infra Manager container started on $ip_address (for Mac OSX route to boot2docker/docker-machine VM address, e.g.: 'sudo route add -net 172.17.0.0/16 192.168.59.103')"
   echo "You can follow Log Search logs with 'docker logs -f infra-manager' command"
 }
diff --git a/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml b/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml
deleted file mode 100644
index 6fad22d..0000000
--- a/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml
+++ /dev/null
@@ -1,784 +0,0 @@
----
-swagger: "2.0"
-info:
-  description: "Manager component for Ambari Infra"
-  version: "1.0.0"
-  title: "Infra Manager REST API"
-  license:
-    name: "Apache 2.0"
-    url: "http://www.apache.org/licenses/LICENSE-2.0.html"
-basePath: "/api/v1"
-tags:
-- name: "jobs"
-schemes:
-- "http"
-- "https"
-paths:
-  /jobs:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get all jobs"
-      description: ""
-      operationId: "getAllJobs"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "page"
-        in: "query"
-        required: false
-        type: "integer"
-        default: 0
-        format: "int32"
-      - name: "size"
-        in: "query"
-        required: false
-        type: "integer"
-        default: 20
-        format: "int32"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            type: "array"
-            items:
-              $ref: "#/definitions/JobInfo"
-  /jobs/executions:
-    delete:
-      tags:
-      - "jobs"
-      summary: "Stop all job executions."
-      description: ""
-      operationId: "stopAll"
-      produces:
-      - "application/json"
-      parameters: []
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            type: "integer"
-            format: "int32"
-  /jobs/executions/{jobExecutionId}:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get job and step details for job execution instance."
-      description: ""
-      operationId: "getExecutionInfo"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/JobExecutionDetailsResponse"
-    delete:
-      tags:
-      - "jobs"
-      summary: "Stop or abandon a running job execution."
-      description: ""
-      operationId: "stopOrAbandonJobExecution"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      - name: "operation"
-        in: "query"
-        required: true
-        type: "string"
-        enum:
-        - "STOP"
-        - "ABANDON"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/JobExecutionInfoResponse"
-  /jobs/executions/{jobExecutionId}/context:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get execution context for specific job."
-      description: ""
-      operationId: "getExecutionContextByJobExecId"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/ExecutionContextResponse"
-  /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get step execution details."
-      description: ""
-      operationId: "getStepExecution"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      - name: "stepExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/StepExecutionInfoResponse"
-  /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}/execution-context:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get the execution context of step execution."
-      description: ""
-      operationId: "getStepExecutionContext"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      - name: "stepExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/StepExecutionContextResponse"
-  /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}/progress:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get progress of step execution."
-      description: ""
-      operationId: "getStepExecutionProgress"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      - name: "stepExecutionId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/StepExecutionProgressResponse"
-  /jobs/info/names:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get all job names"
-      description: ""
-      operationId: "getAllJobNames"
-      produces:
-      - "application/json"
-      parameters: []
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            type: "array"
-            uniqueItems: true
-            items:
-              type: "string"
-  /jobs/{jobName}:
-    post:
-      tags:
-      - "jobs"
-      summary: "Start a new job instance by job name."
-      description: ""
-      operationId: "startJob"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobName"
-        in: "path"
-        required: true
-        type: "string"
-      - name: "params"
-        in: "query"
-        required: false
-        type: "string"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/JobExecutionInfoResponse"
-  /jobs/{jobName}/executions:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get the id values of all the running job instances."
-      description: ""
-      operationId: "getExecutionIdsByJobName"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobName"
-        in: "path"
-        required: true
-        type: "string"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            type: "array"
-            uniqueItems: true
-            items:
-              type: "integer"
-              format: "int64"
-  /jobs/{jobName}/info:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get job details by job name."
-      description: ""
-      operationId: "getJobDetails"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "page"
-        in: "query"
-        required: false
-        type: "integer"
-        default: 0
-        format: "int32"
-      - name: "size"
-        in: "query"
-        required: false
-        type: "integer"
-        default: 20
-        format: "int32"
-      - name: "jobName"
-        in: "path"
-        required: true
-        type: "string"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/JobDetailsResponse"
-  /jobs/{jobName}/{jobInstanceId}/executions:
-    get:
-      tags:
-      - "jobs"
-      summary: "Get execution for job instance."
-      description: ""
-      operationId: "getExecutionsForInstance"
-      produces:
-      - "application/json"
-      parameters:
-      - name: "jobName"
-        in: "path"
-        required: true
-        type: "string"
-      - name: "jobInstanceId"
-        in: "path"
-        required: true
-        type: "integer"
-        format: "int64"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            type: "array"
-            items:
-              $ref: "#/definitions/JobExecutionInfoResponse"
-    post:
-      tags:
-      - "jobs"
-      summary: "Restart job instance."
-      description: ""
-      operationId: "restartJobInstance"
-      produces:
-      - "application/json"
-      parameters:
-      - in: "body"
-        name: "body"
-        required: false
-        schema:
-          $ref: "#/definitions/JobExecutionRestartRequest"
-      responses:
-        200:
-          description: "successful operation"
-          schema:
-            $ref: "#/definitions/JobExecutionInfoResponse"
-definitions:
-  JobExecutionData:
-    type: "object"
-    properties:
-      id:
-        type: "integer"
-        format: "int64"
-      executionContext:
-        $ref: "#/definitions/ExecutionContext"
-      jobInstance:
-        $ref: "#/definitions/JobInstance"
-      jobId:
-        type: "integer"
-        format: "int64"
-      jobParameters:
-        $ref: "#/definitions/JobParameters"
-      failureExceptions:
-        type: "array"
-        items:
-          $ref: "#/definitions/Throwable"
-      endTime:
-        type: "string"
-        format: "date-time"
-      exitStatus:
-        $ref: "#/definitions/ExitStatus"
-      createTime:
-        type: "string"
-        format: "date-time"
-      lastUpdated:
-        type: "string"
-        format: "date-time"
-      jobConfigurationName:
-        type: "string"
-      startTime:
-        type: "string"
-        format: "date-time"
-      status:
-        type: "string"
-        enum:
-        - "COMPLETED"
-        - "STARTING"
-        - "STARTED"
-        - "STOPPING"
-        - "STOPPED"
-        - "FAILED"
-        - "ABANDONED"
-        - "UNKNOWN"
-      stepExecutionDataList:
-        type: "array"
-        items:
-          $ref: "#/definitions/StepExecutionData"
-  JobInstance:
-    type: "object"
-    properties:
-      id:
-        type: "integer"
-        format: "int64"
-      version:
-        type: "integer"
-        format: "int32"
-      jobName:
-        type: "string"
-      instanceId:
-        type: "integer"
-        format: "int64"
-  StepExecutionData:
-    type: "object"
-    properties:
-      id:
-        type: "integer"
-        format: "int64"
-      jobExecutionId:
-        type: "integer"
-        format: "int64"
-      executionContext:
-        $ref: "#/definitions/ExecutionContext"
-      stepName:
-        type: "string"
-      terminateOnly:
-        type: "boolean"
-        default: false
-      failureExceptions:
-        type: "array"
-        items:
-          $ref: "#/definitions/Throwable"
-      endTime:
-        type: "string"
-        format: "date-time"
-      exitStatus:
-        $ref: "#/definitions/ExitStatus"
-      lastUpdated:
-        type: "string"
-        format: "date-time"
-      commitCount:
-        type: "integer"
-        format: "int32"
-      readCount:
-        type: "integer"
-        format: "int32"
-      filterCount:
-        type: "integer"
-        format: "int32"
-      writeCount:
-        type: "integer"
-        format: "int32"
-      readSkipCount:
-        type: "integer"
-        format: "int32"
-      writeSkipCount:
-        type: "integer"
-        format: "int32"
-      processSkipCount:
-        type: "integer"
-        format: "int32"
-      rollbackCount:
-        type: "integer"
-        format: "int32"
-      startTime:
-        type: "string"
-        format: "date-time"
-      status:
-        type: "string"
-        enum:
-        - "COMPLETED"
-        - "STARTING"
-        - "STARTED"
-        - "STOPPING"
-        - "STOPPED"
-        - "FAILED"
-        - "ABANDONED"
-        - "UNKNOWN"
-  StackTraceElement:
-    type: "object"
-    properties:
-      methodName:
-        type: "string"
-      fileName:
-        type: "string"
-      lineNumber:
-        type: "integer"
-        format: "int32"
-      className:
-        type: "string"
-      nativeMethod:
-        type: "boolean"
-        default: false
-  JobExecutionDetailsResponse:
-    type: "object"
-    properties:
-      jobExecutionInfoResponse:
-        $ref: "#/definitions/JobExecutionInfoResponse"
-      stepExecutionInfoList:
-        type: "array"
-        items:
-          $ref: "#/definitions/StepExecutionInfoResponse"
-  StepExecutionContextResponse:
-    type: "object"
-    properties:
-      executionContextMap:
-        type: "object"
-        additionalProperties:
-          type: "object"
-      jobExecutionId:
-        type: "integer"
-        format: "int64"
-      stepExecutionId:
-        type: "integer"
-        format: "int64"
-      stepName:
-        type: "string"
-  StepExecutionProgress:
-    type: "object"
-    properties:
-      estimatedPercentCompleteMessage:
-        $ref: "#/definitions/MessageSourceResolvable"
-      estimatedPercentComplete:
-        type: "number"
-        format: "double"
-  ExitStatus:
-    type: "object"
-    properties:
-      exitCode:
-        type: "string"
-      exitDescription:
-        type: "string"
-      running:
-        type: "boolean"
-        default: false
-  ExecutionContextResponse:
-    type: "object"
-    properties:
-      jobExecutionId:
-        type: "integer"
-        format: "int64"
-      executionContextMap:
-        type: "object"
-        additionalProperties:
-          type: "object"
-  StepExecutionHistory:
-    type: "object"
-    properties:
-      stepName:
-        type: "string"
-      count:
-        type: "integer"
-        format: "int32"
-      commitCount:
-        $ref: "#/definitions/CumulativeHistory"
-      rollbackCount:
-        $ref: "#/definitions/CumulativeHistory"
-      readCount:
-        $ref: "#/definitions/CumulativeHistory"
-      writeCount:
-        $ref: "#/definitions/CumulativeHistory"
-      filterCount:
-        $ref: "#/definitions/CumulativeHistory"
-      readSkipCount:
-        $ref: "#/definitions/CumulativeHistory"
-      writeSkipCount:
-        $ref: "#/definitions/CumulativeHistory"
-      processSkipCount:
-        $ref: "#/definitions/CumulativeHistory"
-      duration:
-        $ref: "#/definitions/CumulativeHistory"
-      durationPerRead:
-        $ref: "#/definitions/CumulativeHistory"
-  TimeZone:
-    type: "object"
-    properties:
-      displayName:
-        type: "string"
-      id:
-        type: "string"
-      dstsavings:
-        type: "integer"
-        format: "int32"
-      rawOffset:
-        type: "integer"
-        format: "int32"
-  MessageSourceResolvable:
-    type: "object"
-    properties:
-      arguments:
-        type: "array"
-        items:
-          type: "object"
-      codes:
-        type: "array"
-        items:
-          type: "string"
-      defaultMessage:
-        type: "string"
-  ExecutionContext:
-    type: "object"
-    properties:
-      dirty:
-        type: "boolean"
-        default: false
-      empty:
-        type: "boolean"
-        default: false
-  StepExecutionInfoResponse:
-    type: "object"
-    properties:
-      id:
-        type: "integer"
-        format: "int64"
-      jobExecutionId:
-        type: "integer"
-        format: "int64"
-      jobName:
-        type: "string"
-      name:
-        type: "string"
-      startDate:
-        type: "string"
-      startTime:
-        type: "string"
-      duration:
-        type: "string"
-      durationMillis:
-        type: "integer"
-        format: "int64"
-      exitCode:
-        type: "string"
-      status:
-        type: "string"
-  JobExecutionInfoResponse:
-    type: "object"
-    properties:
-      id:
-        type: "integer"
-        format: "int64"
-      stepExecutionCount:
-        type: "integer"
-        format: "int32"
-      jobId:
-        type: "integer"
-        format: "int64"
-      jobName:
-        type: "string"
-      startDate:
-        type: "string"
-      startTime:
-        type: "string"
-      duration:
-        type: "string"
-      jobExecutionData:
-        $ref: "#/definitions/JobExecutionData"
-      jobParameters:
-        type: "object"
-        additionalProperties:
-          type: "object"
-      jobParametersString:
-        type: "string"
-      restartable:
-        type: "boolean"
-        default: false
-      abandonable:
-        type: "boolean"
-        default: false
-      stoppable:
-        type: "boolean"
-        default: false
-      timeZone:
-        $ref: "#/definitions/TimeZone"
-  JobInfo:
-    type: "object"
-    properties:
-      name:
-        type: "string"
-      executionCount:
-        type: "integer"
-        format: "int32"
-      launchable:
-        type: "boolean"
-        default: false
-      incrementable:
-        type: "boolean"
-        default: false
-      jobInstanceId:
-        type: "integer"
-        format: "int64"
-  JobExecutionRestartRequest:
-    type: "object"
-    properties:
-      jobName:
-        type: "string"
-      jobInstanceId:
-        type: "integer"
-        format: "int64"
-      operation:
-        type: "string"
-        enum:
-        - "RESTART"
-  Throwable:
-    type: "object"
-    properties:
-      cause:
-        $ref: "#/definitions/Throwable"
-      stackTrace:
-        type: "array"
-        items:
-          $ref: "#/definitions/StackTraceElement"
-      message:
-        type: "string"
-      localizedMessage:
-        type: "string"
-      suppressed:
-        type: "array"
-        items:
-          $ref: "#/definitions/Throwable"
-  JobParameters:
-    type: "object"
-    properties:
-      parameters:
-        type: "object"
-        additionalProperties:
-          $ref: "#/definitions/JobParameter"
-      empty:
-        type: "boolean"
-        default: false
-  CumulativeHistory:
-    type: "object"
-    properties:
-      count:
-        type: "integer"
-        format: "int32"
-      min:
-        type: "number"
-        format: "double"
-      max:
-        type: "number"
-        format: "double"
-      standardDeviation:
-        type: "number"
-        format: "double"
-      mean:
-        type: "number"
-        format: "double"
-  JobInstanceDetailsResponse:
-    type: "object"
-    properties:
-      jobInstance:
-        $ref: "#/definitions/JobInstance"
-      jobExecutionInfoResponseList:
-        type: "array"
-        items:
-          $ref: "#/definitions/JobExecutionInfoResponse"
-  JobParameter:
-    type: "object"
-    properties:
-      identifying:
-        type: "boolean"
-        default: false
-      value:
-        type: "object"
-      type:
-        type: "string"
-        enum:
-        - "STRING"
-        - "DATE"
-        - "LONG"
-        - "DOUBLE"
-  StepExecutionProgressResponse:
-    type: "object"
-    properties:
-      stepExecutionProgress:
-        $ref: "#/definitions/StepExecutionProgress"
-      stepExecutionHistory:
-        $ref: "#/definitions/StepExecutionHistory"
-      stepExecutionInfoResponse:
-        $ref: "#/definitions/StepExecutionInfoResponse"
-  JobDetailsResponse:
-    type: "object"
-    properties:
-      jobInfo:
-        $ref: "#/definitions/JobInfo"
-      jobInstanceDetailsResponseList:
-        type: "array"
-        items:
-          $ref: "#/definitions/JobInstanceDetailsResponse"
diff --git a/ambari-infra/ambari-infra-manager/pom.xml b/ambari-infra/ambari-infra-manager/pom.xml
index e300c8b..2612628 100644
--- a/ambari-infra/ambari-infra-manager/pom.xml
+++ b/ambari-infra/ambari-infra-manager/pom.xml
@@ -29,17 +29,18 @@
   <artifactId>ambari-infra-manager</artifactId>
 
   <properties>
-    <spring.version>4.3.17.RELEASE</spring.version>
-    <spring.security.version>4.2.4.RELEASE</spring.security.version>
-    <spring.ldap.version>2.2.0.RELEASE</spring.ldap.version>
-    <jersey.version>2.25.1</jersey.version>
-    <jetty.version>9.4.11.v20180605</jetty.version>
-    <spring-batch.version>3.0.7.RELEASE</spring-batch.version>
+    <spring.version>5.1.8.RELEASE</spring.version>
+    <spring.security.version>5.1.5.RELEASE</spring.security.version>
+    <spring.ldap.version>2.3.2.RELEASE</spring.ldap.version>
+    <jersey.version>2.27</jersey.version>
+    <spring-batch.version>4.1.0.RELEASE</spring-batch.version>
     <sqlite.version>3.8.11.2</sqlite.version>
-    <spring-data-solr.version>2.0.2.RELEASE</spring-data-solr.version>
-    <spring-boot.version>1.5.13.RELEASE</spring-boot.version>
+    <spring-data-solr.version>3.0.10.RELEASE</spring-data-solr.version>
+    <spring-boot.version>2.1.5.RELEASE</spring-boot.version>
     <swagger.version>1.5.16</swagger.version>
     <jjwt.version>0.6.0</jjwt.version>
+    <aws-sdk.version>1.11.445</aws-sdk.version>
+    <swagger-maven-plugin-version>3.1.7</swagger-maven-plugin-version>
   </properties>
 
   <build>
@@ -48,11 +49,6 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.0</version>
-        <configuration>
-          <source>${jdk.version}</source>
-          <target>${jdk.version}</target>
-        </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
@@ -115,11 +111,59 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>com.github.kongchen</groupId>
+        <artifactId>swagger-maven-plugin</artifactId>
+        <version>${swagger-maven-plugin-version}</version>
+        <configuration>
+          <apiSources>
+            <apiSource>
+              <springmvc>false</springmvc>
+              <locations>org.apache.ambari.infra.rest</locations>
+              <schemes>
+                <scheme>http</scheme>
+                <scheme>https</scheme>
+              </schemes>
+              <basePath>/api/v1</basePath>
+              <info>
+                <title>Infra Manager REST API</title>
+                <version>1.0.0</version>
+                <description>Ambari Infra Manager REST APIs have inherent support for querying, sorting and pagination</description>
+                <license>
+                  <url>http://www.apache.org/licenses/LICENSE-2.0</url>
+                  <name>Apache License, Version 2.0</name>
+                </license>
+              </info>
+              <outputFormats>yaml,json</outputFormats>
+              <swaggerDirectory>${project.basedir}/src/main/resources/swagger</swaggerDirectory>
+              <securityDefinitions>
+                <securityDefinition>
+                  <name>basicAuth</name>
+                  <type>basic</type>
+                </securityDefinition>
+              </securityDefinitions>
+            </apiSource>
+          </apiSources>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>compile</phase>
+            <goals>
+              <goal>generate</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 
   <dependencies>
     <dependency>
+      <groupId>javax.validation</groupId>
+      <artifactId>validation-api</artifactId>
+      <version>2.0.1.Final</version>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
@@ -133,8 +177,8 @@
     <dependency>
       <groupId>org.hamcrest</groupId>
       <artifactId>hamcrest-all</artifactId>
-      <version>1.3</version>
       <scope>test</scope>
+
     </dependency>
     <!-- Spring dependencies -->
     <dependency>
@@ -199,27 +243,43 @@
           <groupId>com.fasterxml.jackson.core</groupId>
           <artifactId>jackson-databind</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.springframework.boot</groupId>
+          <artifactId>spring-boot-starter-tomcat</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
       <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-security</artifactId>
+      <artifactId>spring-boot-starter-jetty</artifactId>
       <version>${spring-boot.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.springframework.boot</groupId>
+          <artifactId>spring-boot-starter-tomcat</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-actuator</artifactId>
+      <artifactId>spring-boot-starter-security</artifactId>
       <version>${spring-boot.version}</version>
     </dependency>
     <dependency>
       <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-jetty</artifactId>
+      <artifactId>spring-boot-starter-actuator</artifactId>
       <version>${spring-boot.version}</version>
     </dependency>
     <dependency>
       <groupId>org.springframework.boot</groupId>
       <artifactId>spring-boot-starter-jersey</artifactId>
       <version>${spring-boot.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.springframework.boot</groupId>
+          <artifactId>spring-boot-starter-tomcat</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.springframework.boot</groupId>
@@ -311,6 +371,10 @@
           <artifactId>slf4j-log4j12</artifactId>
         </exclusion>
         <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+        <exclusion>
           <groupId>org.eclipse.jetty</groupId>
           <artifactId>jetty-server</artifactId>
         </exclusion>
@@ -330,9 +394,54 @@
           <groupId>org.bouncycastle</groupId>
           <artifactId>bcprov-jdk15on</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.google.code.gson</groupId>
+          <artifactId>gson</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>jdk.tools</groupId>
+          <artifactId>jdk.tools</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.commons</groupId>
+          <artifactId>commons-compress</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <!-- AWS -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.amazonaws</groupId>
+          <artifactId>aws-java-sdk-bundle</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-core</artifactId>
+      <version>${aws-sdk.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>${aws-sdk.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-dynamodb</artifactId>
+      <version>${aws-sdk.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs-client</artifactId>
       <version>${hadoop.version}</version>
@@ -383,7 +492,7 @@
     <dependency>
       <groupId>com.thoughtworks.xstream</groupId>
       <artifactId>xstream</artifactId>
-      <version>1.4.10</version>
+      <version>1.4.11.1</version>
     </dependency>
     <dependency>
       <groupId>cglib</groupId>
@@ -419,7 +528,7 @@
     <dependency>
       <groupId>org.webjars</groupId>
       <artifactId>swagger-ui</artifactId>
-      <version>2.2.2</version>
+      <version>3.19.0</version>
     </dependency>
     <dependency>
       <groupId>org.springframework</groupId>
@@ -455,17 +564,16 @@
           <groupId>org.slf4j</groupId>
           <artifactId>slf4j-log4j12</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.springframework.batch</groupId>
+          <artifactId>spring-batch-admin-resources</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
       <artifactId>guava</artifactId>
       <groupId>com.google.guava</groupId>
-      <version>20.0</version>
-    </dependency>
-    <dependency>
-      <groupId>com.amazonaws</groupId>
-      <artifactId>aws-java-sdk-s3</artifactId>
-      <version>1.11.5</version>
+      <version>26.0-jre</version>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
@@ -473,11 +581,27 @@
       <version>1.5</version>
     </dependency>
     <dependency>
-      <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-tomcat</artifactId>
-      <version>${spring-boot.version}</version>
-      <scope>provided</scope>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>1.18</version>
+    </dependency>
+    <dependency>
+      <groupId>javax.xml.bind</groupId>
+      <artifactId>jaxb-api</artifactId>
+      <version>2.3.1</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-web</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+
   </dependencies>
 
 </project>
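
With the checked-in docs/api/swagger.yaml deleted above, the swagger-maven-plugin configured here regenerates the spec at compile time by scanning the JAX-RS resources in org.apache.ambari.infra.rest (the generated swagger.json/swagger.yaml are ignored via the .gitignore change earlier in this commit). The plugin reads io.swagger annotations; a trimmed sketch of the kind of resource it scans, with the operation name taken from the deleted spec and everything else illustrative:

    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.core.MediaType;

    import io.swagger.annotations.Api;
    import io.swagger.annotations.ApiOperation;

    @Api(value = "jobs")
    @Path("jobs")
    public class JobResource {

      @GET
      @Produces(MediaType.APPLICATION_JSON)
      @ApiOperation("Get all jobs")  // the method name becomes operationId getAllJobs in the generated spec
      public String getAllJobs() {
        return "[]"; // placeholder body; the real resource returns job metadata
      }
    }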
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
index 938cfd0..87622ad 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
@@ -19,15 +19,16 @@
 package org.apache.ambari.infra;
 
 import org.springframework.boot.Banner;
+import org.springframework.boot.WebApplicationType;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.autoconfigure.batch.BatchAutoConfiguration;
 import org.springframework.boot.autoconfigure.data.rest.RepositoryRestMvcAutoConfiguration;
 import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
-import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration;
+import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
 import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
-import org.springframework.boot.autoconfigure.web.WebMvcAutoConfiguration;
+import org.springframework.boot.autoconfigure.web.servlet.WebMvcAutoConfiguration;
 import org.springframework.boot.builder.SpringApplicationBuilder;
-import org.springframework.boot.system.ApplicationPidFileWriter;
+import org.springframework.boot.context.ApplicationPidFileWriter;
 
 @SpringBootApplication(
   scanBasePackages = {"org.apache.ambari.infra"},
@@ -47,7 +48,7 @@ public class InfraManager {
     new SpringApplicationBuilder(InfraManager.class)
       .bannerMode(Banner.Mode.OFF)
       .listeners(new ApplicationPidFileWriter(pidFile))
-      .web(true)
+      .web(WebApplicationType.SERVLET)
       .run(args);
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java
index 86059a2..39fff7d 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java
@@ -31,6 +31,8 @@ public class InfraManagerConfig {
 
   @Bean
   public static PropertySourcesPlaceholderConfigurer propertyConfigurer() {
-    return new PropertySourcesPlaceholderConfigurer();
+    PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer = new PropertySourcesPlaceholderConfigurer();
+    propertySourcesPlaceholderConfigurer.setNullValue("@null");
+    return propertySourcesPlaceholderConfigurer;
   }
 }
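
Setting "@null" as the placeholder null value lets a property default resolve to Java null instead of the literal string. A minimal sketch of the effect, using the admin password property that WebSecurityConfig below declares:

    // Resolves to null when infra-manager.admin-user.password is unset,
    // rather than to the literal string "@null".
    @Value("${infra-manager.admin-user.password:@null}")
    private String adminUserPassword;
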
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java
index 3a4c00f..338fac4 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java
@@ -18,20 +18,19 @@
  */
 package org.apache.ambari.infra.conf;
 
+import javax.ws.rs.ApplicationPath;
+
 import org.apache.ambari.infra.rest.JobResource;
-import org.glassfish.jersey.jackson.JacksonFeature;
+import org.glassfish.jersey.jackson.internal.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.servlet.ServletProperties;
 
-import javax.ws.rs.ApplicationPath;
-
 @ApplicationPath("/api/v1")
 public class InfraManagerJerseyResourceConfig extends ResourceConfig {
 
   public InfraManagerJerseyResourceConfig() {
     packages(JobResource.class.getPackage().getName());
-    register(JacksonFeature.class);
+    register(JacksonJaxbJsonProvider.class);
     property(ServletProperties.FILTER_FORWARD_ON_404, true);
   }
-
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java
index 06aea79..dd101b0 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java
@@ -18,31 +18,19 @@
  */
 package org.apache.ambari.infra.conf;
 
-import org.eclipse.jetty.server.Server;
+import javax.inject.Inject;
+
 import org.eclipse.jetty.servlet.DefaultServlet;
 import org.glassfish.jersey.servlet.ServletContainer;
 import org.glassfish.jersey.servlet.ServletProperties;
-import org.springframework.beans.factory.annotation.Value;
 import org.springframework.boot.autoconfigure.web.ServerProperties;
-import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory;
-import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainer;
-import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory;
 import org.springframework.boot.web.servlet.ServletRegistrationBean;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-import javax.inject.Inject;
-
 @Configuration
 public class InfraManagerServletConfig {
 
-  private static final Integer SESSION_TIMEOUT = 60 * 30;
-  private static final String INFRA_MANAGER_SESSIONID = "INFRAMANAGER_SESSIONID";
-  private static final String INFRA_MANAGER_APPLICATION_NAME = "infra-manager";
-
-  @Value("${infra-manager.server.port:61890}")
-  private int port;
-
   @Inject
   private ServerProperties serverProperties;
 
@@ -52,32 +40,17 @@ public class InfraManagerServletConfig {
 
   @Bean
   public ServletRegistrationBean jerseyServlet() {
-    ServletRegistrationBean jerseyServletBean = new ServletRegistrationBean(new ServletContainer(), "/api/v1/*");
+    ServletRegistrationBean jerseyServletBean = new ServletRegistrationBean<>(new ServletContainer(), "/api/v1/*");
     jerseyServletBean.addInitParameter(ServletProperties.JAXRS_APPLICATION_CLASS, InfraManagerJerseyResourceConfig.class.getName());
     return jerseyServletBean;
   }
 
   @Bean
   public ServletRegistrationBean dataServlet() {
-    ServletRegistrationBean dataServletBean = new ServletRegistrationBean(new DefaultServlet(), "/files/*");
+    ServletRegistrationBean dataServletBean = new ServletRegistrationBean<>(new DefaultServlet(), "/files/*");
     dataServletBean.addInitParameter("dirAllowed","true");
     dataServletBean.addInitParameter("pathInfoOnly","true");
     dataServletBean.addInitParameter("resourceBase", infraManagerDataConfig.getDataFolder());
     return dataServletBean;
   }
-
-  @Bean
-  public EmbeddedServletContainerFactory containerFactory() {
-    final JettyEmbeddedServletContainerFactory jettyEmbeddedServletContainerFactory = new JettyEmbeddedServletContainerFactory() {
-      @Override
-      protected JettyEmbeddedServletContainer getJettyEmbeddedServletContainer(Server server) {
-        return new JettyEmbeddedServletContainer(server);
-      }
-    };
-    jettyEmbeddedServletContainerFactory.setSessionTimeout(SESSION_TIMEOUT);
-    serverProperties.getSession().getCookie().setName(INFRA_MANAGER_SESSIONID);
-    serverProperties.setDisplayName(INFRA_MANAGER_APPLICATION_NAME);
-    jettyEmbeddedServletContainerFactory.setPort(port);
-    return jettyEmbeddedServletContainerFactory;
-  }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerWebServerCustomizer.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerWebServerCustomizer.java
new file mode 100644
index 0000000..9ed0861
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerWebServerCustomizer.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.conf;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+import java.time.Duration;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.apache.ambari.infra.conf.security.SslSecrets;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.web.ServerProperties;
+import org.springframework.boot.web.embedded.jetty.JettyServletWebServerFactory;
+import org.springframework.boot.web.server.Ssl;
+import org.springframework.boot.web.server.WebServerFactoryCustomizer;
+
+@Named
+public class InfraManagerWebServerCustomizer implements WebServerFactoryCustomizer<JettyServletWebServerFactory> {
+
+  @Value("${infra-manager.server.port:61890}")
+  private int port;
+
+  @Value("${infra-manager.server.ssl.enabled:false}")
+  private boolean sslEnabled;
+
+  @Inject
+  private ServerProperties serverProperties;
+
+  @Inject
+  private SslSecrets sslSecrets;
+
+  private static final Integer SESSION_TIMEOUT = 60 * 30;
+  private static final String INFRA_MANAGER_SESSION_ID = "INFRAMANAGER_SESSIONID";
+  private static final String INFRA_MANAGER_APPLICATION_NAME = "infra-manager";
+
+  @Override
+  public void customize(JettyServletWebServerFactory factory) {
+    factory.setPort(port);
+    factory.setDisplayName(INFRA_MANAGER_APPLICATION_NAME);
+    factory.getSession().getCookie().setName(INFRA_MANAGER_SESSION_ID);
+    factory.getSession().setTimeout(Duration.ofSeconds(SESSION_TIMEOUT));
+
+    Ssl ssl = new Ssl();
+    String keyStore = System.getProperty("javax.net.ssl.keyStore");
+    if (isNotBlank(keyStore)) {
+      ssl.setKeyStore(keyStore);
+      ssl.setKeyStoreType(System.getProperty("javax.net.ssl.keyStoreType"));
+      String keyStorePassword = sslSecrets.getKeyStorePassword().get().orElseThrow(() -> new IllegalStateException("Password for keystore is not set!"));
+      ssl.setKeyStorePassword(keyStorePassword);
+      System.setProperty("javax.net.ssl.keyStorePassword", keyStorePassword);
+    }
+
+    String trustStore = System.getProperty("javax.net.ssl.trustStore");
+    if (isNotBlank(trustStore)) {
+      ssl.setTrustStore(trustStore);
+      ssl.setTrustStoreType(System.getProperty("javax.net.ssl.trustStoreType"));
+      String trustStorePassword = sslSecrets.getTrustStorePassword().get().orElseThrow(() -> new IllegalStateException("Password for truststore is not set!"));
+      ssl.setTrustStorePassword(trustStorePassword);
+      System.setProperty("javax.net.ssl.trustStorePassword", trustStorePassword);
+    }
+
+    ssl.setEnabled(sslEnabled);
+
+    factory.setSsl(ssl);
+  }
+}
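
The customizer only attaches key material when the standard JSSE system properties are present, taking the passwords from SslSecrets. A local-run sketch with illustrative values (the keystore path and type below are assumptions, not part of this commit):

    // Supply the keystore before the application starts; the customizer reads
    // these properties and fetches the passwords from the credential store.
    System.setProperty("javax.net.ssl.keyStore", "/etc/ambari-infra/conf/keystore.p12");
    System.setProperty("javax.net.ssl.keyStoreType", "PKCS12");
    // HTTPS itself is switched on via infra-manager.server.ssl.enabled=true.
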
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java
index f0cd3cf..9651271 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java
@@ -22,11 +22,11 @@ import org.springframework.context.annotation.Configuration;
 import org.springframework.web.servlet.config.annotation.EnableWebMvc;
 import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
 import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
-import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
 
 @EnableWebMvc
 @Configuration
-public class StaticResourceConfiguration extends WebMvcConfigurerAdapter {
+public class StaticResourceConfiguration implements WebMvcConfigurer {
 
   private static final String[] CLASSPATH_RESOURCE_LOCATIONS = {
     "classpath:/static/", "classpath:/swagger/","classpath:META-INF/resources/webjars/"
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java
index 706ed8b..97a5c28 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java
@@ -18,6 +18,9 @@
  */
 package org.apache.ambari.infra.conf.batch;
 
+import javax.inject.Inject;
+import javax.sql.DataSource;
+
 import org.springframework.batch.admin.service.JdbcSearchableJobExecutionDao;
 import org.springframework.batch.admin.service.JdbcSearchableJobInstanceDao;
 import org.springframework.batch.admin.service.JdbcSearchableStepExecutionDao;
@@ -30,33 +33,24 @@ import org.springframework.batch.core.configuration.JobRegistry;
 import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
 import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
 import org.springframework.batch.core.explore.JobExplorer;
-import org.springframework.batch.core.explore.support.JobExplorerFactoryBean;
 import org.springframework.batch.core.launch.JobLauncher;
 import org.springframework.batch.core.launch.JobOperator;
-import org.springframework.batch.core.launch.support.SimpleJobLauncher;
 import org.springframework.batch.core.launch.support.SimpleJobOperator;
 import org.springframework.batch.core.repository.ExecutionContextSerializer;
 import org.springframework.batch.core.repository.JobRepository;
 import org.springframework.batch.core.repository.dao.ExecutionContextDao;
 import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer;
 import org.springframework.batch.core.repository.dao.JdbcExecutionContextDao;
-import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
-import org.springframework.batch.support.transaction.ResourcelessTransactionManager;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.core.io.Resource;
-import org.springframework.core.task.SimpleAsyncTaskExecutor;
 import org.springframework.jdbc.core.JdbcTemplate;
 import org.springframework.jdbc.datasource.DriverManagerDataSource;
 import org.springframework.jdbc.datasource.init.DataSourceInitializer;
 import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
 import org.springframework.scheduling.annotation.EnableAsync;
 import org.springframework.scheduling.annotation.EnableScheduling;
-import org.springframework.transaction.PlatformTransactionManager;
-
-import javax.inject.Inject;
-import javax.sql.DataSource;
 
 @Configuration
 @EnableBatchProcessing
@@ -84,6 +78,12 @@ public class InfraManagerBatchConfig {
 
   @Inject
   private JobRegistry jobRegistry;
+  @Inject
+  private JobExplorer jobExplorer;
+  @Inject
+  private JobLauncher jobLauncher;
+  @Inject
+  private JobRepository jobRepository;
 
   @Bean
   public DataSource dataSource() {
@@ -118,49 +118,16 @@ public class InfraManagerBatchConfig {
   }
 
   @Bean
-  public JobRepository jobRepository() throws Exception {
-    JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
-    factory.setDataSource(dataSource());
-    factory.setTransactionManager(transactionManager());
-    factory.setSerializer(executionContextSerializer());
-    factory.afterPropertiesSet();
-    return factory.getObject();
-  }
-
-  @Bean
-  public PlatformTransactionManager transactionManager() {
-    return new ResourcelessTransactionManager();
-  }
-
-  @Bean(name = "jobLauncher")
-  public JobLauncher jobLauncher() throws Exception {
-    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
-    jobLauncher.setJobRepository(jobRepository());
-    jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor());
-    jobLauncher.afterPropertiesSet();
-    return jobLauncher;
-  }
-
-  @Bean
-  public JobOperator jobOperator() throws Exception {
+  public JobOperator jobOperator() {
     SimpleJobOperator jobOperator = new SimpleJobOperator();
-    jobOperator.setJobExplorer(jobExplorer());
-    jobOperator.setJobLauncher(jobLauncher());
+    jobOperator.setJobExplorer(jobExplorer);
+    jobOperator.setJobLauncher(jobLauncher);
     jobOperator.setJobRegistry(jobRegistry);
-    jobOperator.setJobRepository(jobRepository());
+    jobOperator.setJobRepository(jobRepository);
     return jobOperator;
   }
 
   @Bean
-  public JobExplorer jobExplorer() throws Exception {
-    JobExplorerFactoryBean factoryBean = new JobExplorerFactoryBean();
-    factoryBean.setSerializer(executionContextSerializer());
-    factoryBean.setDataSource(dataSource());
-    factoryBean.afterPropertiesSet();
-    return factoryBean.getObject();
-  }
-
-  @Bean
   public JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor() {
     JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor = new JobRegistryBeanPostProcessor();
     jobRegistryBeanPostProcessor.setJobRegistry(jobRegistry);
@@ -204,9 +171,9 @@ public class InfraManagerBatchConfig {
   }
 
   @Bean
-  public JobService jobService() throws Exception {
+  public JobService jobService() {
     return new
       SimpleJobService(searchableJobInstanceDao(), searchableJobExecutionDao(), searchableStepExecutionDao(),
-      jobRepository(), jobLauncher(), jobRegistry, executionContextDao());
+      jobRepository, jobLauncher, jobRegistry, executionContextDao());
   }
 }
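
With @EnableBatchProcessing, the JobRepository, JobLauncher and JobExplorer are now produced by the BatchConfigurer introduced below and simply injected here. The resulting JobOperator can then drive registered jobs by name; a hedged usage sketch (the job name is hypothetical, checked exceptions omitted):

    // Launches a new instance of an already-registered job.
    long executionId = jobOperator.startNextInstance("archive_service_logs");
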
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfigurer.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfigurer.java
new file mode 100644
index 0000000..54146c6
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfigurer.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.conf.batch;
+
+import javax.inject.Inject;
+import javax.sql.DataSource;
+
+import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer;
+import org.springframework.batch.core.explore.JobExplorer;
+import org.springframework.batch.core.explore.support.JobExplorerFactoryBean;
+import org.springframework.batch.core.launch.JobLauncher;
+import org.springframework.batch.core.launch.support.SimpleJobLauncher;
+import org.springframework.batch.core.repository.ExecutionContextSerializer;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
+import org.springframework.batch.support.transaction.ResourcelessTransactionManager;
+import org.springframework.core.task.SimpleAsyncTaskExecutor;
+import org.springframework.transaction.PlatformTransactionManager;
+
+public class InfraManagerBatchConfigurer extends DefaultBatchConfigurer {
+
+  @Inject
+  private DataSource dataSource;
+  @Inject
+  private ExecutionContextSerializer executionContextSerializer;
+
+  @Override
+  public JobRepository createJobRepository() throws Exception {
+    JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
+    factory.setDataSource(dataSource);
+    factory.setTransactionManager(getTransactionManager());
+    factory.setSerializer(executionContextSerializer);
+    factory.afterPropertiesSet();
+    return factory.getObject();
+  }
+
+  @Override
+  public PlatformTransactionManager getTransactionManager() {
+    return new ResourcelessTransactionManager();
+  }
+
+  @Override
+  protected JobLauncher createJobLauncher() throws Exception {
+    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
+    jobLauncher.setJobRepository(getJobRepository());
+    jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor());
+    jobLauncher.afterPropertiesSet();
+    return jobLauncher;
+  }
+
+  @Override
+  protected JobExplorer createJobExplorer() throws Exception {
+    JobExplorerFactoryBean factoryBean = new JobExplorerFactoryBean();
+    factoryBean.setSerializer(executionContextSerializer);
+    factoryBean.setDataSource(dataSource);
+    factoryBean.afterPropertiesSet();
+    return factoryBean.getObject();
+  }
+}
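
Because its collaborators are field-injected, the configurer must itself be a Spring-managed bean; its registration is not shown in this hunk. One plausible wiring, assuming it is not picked up by component scanning (an assumption, not part of this commit):

    @Bean
    public BatchConfigurer batchConfigurer() {
      // DefaultBatchConfigurer subclass; dataSource and the execution context
      // serializer are injected once the bean is managed by Spring.
      return new InfraManagerBatchConfigurer();
    }
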
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositeSecret.java
similarity index 70%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositeSecret.java
index 6d32963..e8ab52e 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositeSecret.java
@@ -20,17 +20,17 @@ package org.apache.ambari.infra.conf.security;
 
 import java.util.Optional;
 
-public class CompositePasswordStore implements PasswordStore {
-  private PasswordStore[] passwordStores;
+public class CompositeSecret implements Secret {
+  private Secret[] secrets;
 
-  public CompositePasswordStore(PasswordStore... passwordStores) {
-    this.passwordStores = passwordStores;
+  public CompositeSecret(Secret... secrets) {
+    this.secrets = secrets;
   }
 
   @Override
-  public Optional<String> getPassword(String propertyName) {
-    for (PasswordStore passwordStore : passwordStores) {
-      Optional<String> optionalPassword = passwordStore.getPassword(propertyName);
+  public Optional<String> get() {
+    for (Secret secret : secrets) {
+      Optional<String> optionalPassword = secret.get();
       if (optionalPassword.isPresent())
         return optionalPassword;
     }
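
CompositeSecret returns the first present value, which makes fallback chains cheap to express. A usage sketch combining the other Secret implementations from this commit (the environment variable name is illustrative):

    Secret keystorePassword = new CompositeSecret(
        hadoopCredentialStore.getSecret("infra_manager_keystore_password"),
        new EnvironmentalSecret("INFRA_MANAGER_KEYSTORE_PASSWORD"));
    Optional<String> value = keystorePassword.get(); // first source that has it wins
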
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/EnvironmentalSecret.java
similarity index 72%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/EnvironmentalSecret.java
index 8e3387b..887767b 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/EnvironmentalSecret.java
@@ -20,9 +20,16 @@ package org.apache.ambari.infra.conf.security;
 
 import java.util.Optional;
 
-public class SecurityEnvironment implements PasswordStore {
+public class EnvironmentalSecret implements Secret {
+
+  private final String environmentalVariableName;
+
+  public EnvironmentalSecret(String environmentalVariableName) {
+    this.environmentalVariableName = environmentalVariableName;
+  }
+
   @Override
-  public Optional<String> getPassword(String propertyName) {
-    return Optional.ofNullable(System.getenv(propertyName));
+  public Optional<String> get() {
+    return Optional.ofNullable(System.getenv(environmentalVariableName));
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredential.java
similarity index 63%
rename from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java
rename to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredential.java
index 6d32963..8fba08a 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredential.java
@@ -20,20 +20,22 @@ package org.apache.ambari.infra.conf.security;
 
 import java.util.Optional;
 
-public class CompositePasswordStore implements PasswordStore {
-  private PasswordStore[] passwordStores;
+public class HadoopCredential implements Secret {
 
-  public CompositePasswordStore(PasswordStore... passwordStores) {
-    this.passwordStores = passwordStores;
+  private final HadoopCredentialStore hadoopCredentialStore;
+  private final String propertyName;
+
+  public HadoopCredential(HadoopCredentialStore hadoopCredentialStore, String propertyName) {
+    this.propertyName = propertyName;
+    this.hadoopCredentialStore = hadoopCredentialStore;
   }
 
   @Override
-  public Optional<String> getPassword(String propertyName) {
-    for (PasswordStore passwordStore : passwordStores) {
-      Optional<String> optionalPassword = passwordStore.getPassword(propertyName);
-      if (optionalPassword.isPresent())
-        return optionalPassword;
+  public Optional<String> get() {
+    if (hadoopCredentialStore == null) {
+      return Optional.empty();
     }
-    return Optional.empty();
+
+    return hadoopCredentialStore.get(propertyName).map(String::new);
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
index 9e1a17f..08a8804 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
@@ -18,16 +18,14 @@
  */
 package org.apache.ambari.infra.conf.security;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Optional;
-
 import static org.apache.commons.lang.StringUtils.isBlank;
 import static org.apache.commons.lang3.ArrayUtils.isNotEmpty;
 
-public class HadoopCredentialStore implements PasswordStore {
-  private static final Logger LOG = LoggerFactory.getLogger(InfraManagerSecurityConfig.class);
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.Optional;
+
+public class HadoopCredentialStore {
   public static final String CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY = "hadoop.security.credential.provider.path";
 
   private final String credentialStoreProviderPath;
@@ -36,8 +34,7 @@ public class HadoopCredentialStore implements PasswordStore {
     this.credentialStoreProviderPath = credentialStoreProviderPath;
   }
 
-  @Override
-  public Optional<String> getPassword(String propertyName) {
+  public Optional<char[]> get(String key) {
     try {
       if (isBlank(credentialStoreProviderPath)) {
         return Optional.empty();
@@ -45,11 +42,14 @@ public class HadoopCredentialStore implements PasswordStore {
 
       org.apache.hadoop.conf.Configuration config = new org.apache.hadoop.conf.Configuration();
       config.set(CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY, credentialStoreProviderPath);
-      char[] passwordChars = config.getPassword(propertyName);
-      return (isNotEmpty(passwordChars)) ? Optional.of(new String(passwordChars)) : Optional.empty();
-    } catch (Exception e) {
-      LOG.warn("Could not load password {} from credential store.", propertyName);
-      return Optional.empty();
+      char[] passwordChars = config.getPassword(key);
+      return (isNotEmpty(passwordChars)) ? Optional.of(passwordChars) : Optional.empty();
+    } catch (IOException e) {
+      throw new UncheckedIOException(String.format("Could not load password %s from credential store.", key), e);
     }
   }
+
+  public Secret getSecret(String key) {
+    return new HadoopCredential(this, key);
+  }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/Secret.java
similarity index 91%
rename from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java
rename to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/Secret.java
index 19848fe..e4f54a3 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/Secret.java
@@ -20,6 +20,6 @@ package org.apache.ambari.infra.conf.security;
 
 import java.util.Optional;
 
-public interface PasswordStore {
-  Optional<String> getPassword(String propertyName);
+public interface Secret {
+  Optional<String> get();
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecretStoreConfig.java
similarity index 74%
rename from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java
rename to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecretStoreConfig.java
index 45b79b3..72b64d5 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecretStoreConfig.java
@@ -18,21 +18,27 @@
  */
 package org.apache.ambari.infra.conf.security;
 
+import static org.apache.ambari.infra.conf.security.HadoopCredentialStore.CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY;
+
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-import static org.apache.ambari.infra.conf.security.HadoopCredentialStore.CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY;
-
 @Configuration
-public class InfraManagerSecurityConfig {
+public class SecretStoreConfig {
 
   @Value("${"+ CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY + ":}")
   private String credentialStoreProviderPath;
 
+  @Bean
+  public HadoopCredentialStore hadoopCredentialStore() {
+    return new HadoopCredentialStore(credentialStoreProviderPath);
+  }
 
   @Bean
-  public PasswordStore passwords() {
-    return new CompositePasswordStore(new HadoopCredentialStore(credentialStoreProviderPath), new SecurityEnvironment());
+  public SslSecrets sslSecrets(HadoopCredentialStore hadoopCredentialStore) {
+    return new SslSecrets(
+            hadoopCredentialStore.getSecret("infra_manager_keystore_password"),
+            hadoopCredentialStore.getSecret("infra_manager_truststore_password"));
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SslSecrets.java
similarity index 66%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SslSecrets.java
index 8e3387b..6323e95 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SslSecrets.java
@@ -18,11 +18,20 @@
  */
 package org.apache.ambari.infra.conf.security;
 
-import java.util.Optional;
+public class SslSecrets {
+  private final Secret keyStorePassword;
+  private final Secret trustStorePassword;
 
-public class SecurityEnvironment implements PasswordStore {
-  @Override
-  public Optional<String> getPassword(String propertyName) {
-    return Optional.ofNullable(System.getenv(propertyName));
+  public SslSecrets(Secret keyStorePassword, Secret trustStorePassword) {
+    this.keyStorePassword = keyStorePassword;
+    this.trustStorePassword = trustStorePassword;
+  }
+
+  public Secret getKeyStorePassword() {
+    return keyStorePassword;
+  }
+
+  public Secret getTrustStorePassword() {
+    return trustStorePassword;
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/WebSecurityConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/WebSecurityConfig.java
new file mode 100644
index 0000000..ef59ab1
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/WebSecurityConfig.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.conf.security;
+
+import static java.util.Arrays.asList;
+
+import java.util.Optional;
+
+import javax.inject.Inject;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
+import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
+import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
+import org.springframework.security.crypto.password.PasswordEncoder;
+import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
+import org.springframework.security.web.util.matcher.OrRequestMatcher;
+import org.springframework.security.web.util.matcher.RequestMatcher;
+
+@Configuration
+@EnableWebSecurity
+public class WebSecurityConfig extends WebSecurityConfigurerAdapter {
+
+  @Value("${infra-manager.admin-user.username:admin}")
+  private String adminUserName;
+
+  @Value("${infra-manager.admin-user.password:@null}")
+  private String adminUserPassword;
+
+  @Override
+  protected void configure(HttpSecurity http) throws Exception {
+    http.csrf().disable().authorizeRequests()
+            .requestMatchers(publicEndpoints())
+            .permitAll()
+            .antMatchers("/**")
+            .hasRole("ADMIN")
+            .and()
+            .httpBasic();
+  }
+
+  private RequestMatcher publicEndpoints() {
+    return new OrRequestMatcher(asList(
+            new AntPathRequestMatcher("/docs/**"),
+            new AntPathRequestMatcher("/swagger-ui/**"),
+            new AntPathRequestMatcher("/api/v1/swagger.yaml")
+    ));
+  }
+
+  @Inject
+  public void configureGlobal(
+          AuthenticationManagerBuilder auth,
+          PasswordEncoder passwordEncoder,
+          HadoopCredentialStore hadoopCredentialStore) throws Exception {
+
+    Secret adminPassword = new CompositeSecret(
+            hadoopCredentialStore.getSecret("infra_manager_admin_user_password"),
+            () -> Optional.ofNullable(adminUserPassword));
+
+    auth.inMemoryAuthentication()
+            .passwordEncoder(passwordEncoder)
+            .withUser(adminUserName)
+            .password(passwordEncoder.encode(adminPassword.get().orElseThrow(() -> new IllegalStateException("Password for admin user is not set!"))))
+            .roles("ADMIN");
+  }
+
+  @Bean
+  public PasswordEncoder passwordEncoder() {
+    return new BCryptPasswordEncoder();
+  }
+}
\ No newline at end of file
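
With this configuration every endpoint outside the docs and Swagger paths requires HTTP Basic credentials for the in-memory admin user, e.g. curl -u admin:<password> http://localhost:61890/api/v1/jobs (the /jobs path is illustrative). The password resolves from the credential store alias infra_manager_admin_user_password first and falls back to the infra-manager.admin-user.password property; startup fails if neither is set.
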
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
deleted file mode 100644
index e536d9a..0000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.doc;
-
-import io.swagger.jaxrs.config.BeanConfig;
-import io.swagger.models.Swagger;
-import io.swagger.util.Yaml;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.annotation.PostConstruct;
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-@Named
-public class InfraManagerApiDocStorage {
-
-  private static final Logger LOG = LoggerFactory.getLogger(InfraManagerApiDocStorage.class);
-
-  private final Map<String, Object> swaggerMap = new ConcurrentHashMap<>();
-
-  @Inject
-  private BeanConfig beanConfig;
-
-  @PostConstruct
-  private void postConstruct() {
-    Thread loadApiDocThread = new Thread("load_swagger_api_doc") {
-      @Override
-      public void run() {
-        LOG.info("Start thread to scan REST API doc from endpoints.");
-        Swagger swagger = beanConfig.getSwagger();
-        beanConfig.configure(swagger);
-        beanConfig.scanAndRead();
-        setSwagger(swagger);
-        try {
-          if (swagger != null) {
-            String yaml = Yaml.mapper().writeValueAsString(swagger);
-            StringBuilder b = new StringBuilder();
-            String[] parts = yaml.split("\n");
-            for (String part : parts) {
-              b.append(part);
-              b.append("\n");
-            }
-            setSwaggerYaml(b.toString());
-          }
-        } catch (Exception e) {
-          e.printStackTrace();
-        }
-        LOG.info("Scanning REST API endpoints and generating docs has been successful.");
-      }
-    };
-    loadApiDocThread.setDaemon(true);
-    loadApiDocThread.start();
-  }
-
-  public Swagger getSwagger() {
-    return (Swagger) swaggerMap.get("swaggerObject");
-  }
-
-  public void setSwagger(final Swagger swagger) {
-    swaggerMap.put("swaggerObject", swagger);
-  }
-
-  public void setSwaggerYaml(final String swaggerYaml) {
-    swaggerMap.put("swaggerYaml", swaggerYaml);
-  }
-
-  public String getSwaggerYaml() {
-    return (String) swaggerMap.get("swaggerYaml");
-  }
-
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
index 02a6885..8806cf0 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
@@ -18,8 +18,12 @@
  */
 package org.apache.ambari.infra.job;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.util.Map;
+
+import javax.annotation.PostConstruct;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
 import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
@@ -27,18 +31,15 @@ import org.springframework.batch.core.job.builder.JobBuilder;
 import org.springframework.boot.context.event.ApplicationReadyEvent;
 import org.springframework.context.event.EventListener;
 
-import javax.annotation.PostConstruct;
-import java.util.Map;
-
-public abstract class AbstractJobsConfiguration<T extends JobProperties<T>> {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractJobsConfiguration.class);
+public abstract class AbstractJobsConfiguration<TProperties extends JobProperties<TParameters>, TParameters extends Validatable> {
+  private static final Logger logger = LogManager.getLogger(AbstractJobsConfiguration.class);
 
-  private final Map<String, T> propertyMap;
+  private final Map<String, TProperties> propertyMap;
   private final JobScheduler scheduler;
   private final JobBuilderFactory jobs;
   private final JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor;
 
-  protected AbstractJobsConfiguration(Map<String, T> propertyMap, JobScheduler scheduler, JobBuilderFactory jobs, JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor) {
+  protected AbstractJobsConfiguration(Map<String, TProperties> propertyMap, JobScheduler scheduler, JobBuilderFactory jobs, JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor) {
     this.propertyMap = propertyMap;
     this.scheduler = scheduler;
     this.jobs = jobs;
@@ -55,13 +56,13 @@ public abstract class AbstractJobsConfiguration<T extends JobProperties<T>> {
             .forEach(jobName -> {
               try {
                 propertyMap.get(jobName).validate(jobName);
-                LOG.info("Registering job {}", jobName);
+                logger.info("Registering job {}", jobName);
                 JobBuilder jobBuilder = jobs.get(jobName).listener(new JobsPropertyMap<>(propertyMap));
                 Job job = buildJob(jobBuilder);
                 jobRegistryBeanPostProcessor.postProcessAfterInitialization(job, jobName);
               }
               catch (Exception e) {
-                LOG.warn("Unable to register job " + jobName, e);
+                logger.warn("Unable to register job " + jobName, e);
                 propertyMap.get(jobName).setEnabled(false);
               }
             });
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/InfraJobExecutionDao.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/InfraJobExecutionDao.java
new file mode 100644
index 0000000..903639c
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/InfraJobExecutionDao.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job;
+
+import java.time.OffsetDateTime;
+import java.util.Date;
+
+import javax.inject.Inject;
+
+import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.stereotype.Repository;
+import org.springframework.transaction.support.TransactionTemplate;
+
+@Repository
+public class InfraJobExecutionDao extends AbstractJdbcBatchMetadataDao {
+
+  private final TransactionTemplate transactionTemplate;
+
+  @Inject
+  public InfraJobExecutionDao(JdbcTemplate jdbcTemplate, TransactionTemplate transactionTemplate) {
+    setJdbcTemplate(jdbcTemplate);
+    this.transactionTemplate = transactionTemplate;
+  }
+
+  public void deleteJobExecutions(OffsetDateTime olderThan) {
+    transactionTemplate.execute(transactionStatus -> {
+      Date olderThanDate = Date.from(olderThan.toInstant());
+      deleteStepExecutionContexts(olderThanDate);
+      deleteStepExecutions(olderThanDate);
+      deleteJobExecutionParams(olderThanDate);
+      deleteJobExecutionContexts(olderThanDate);
+      getJdbcTemplate().update(getQuery("DELETE FROM %PREFIX%JOB_EXECUTION WHERE CREATE_TIME < ?"), olderThanDate);
+      getJdbcTemplate().update(getQuery("DELETE FROM %PREFIX%JOB_INSTANCE WHERE JOB_INSTANCE_ID NOT IN (SELECT JOB_INSTANCE_ID FROM %PREFIX%JOB_EXECUTION)"));
+      return null;
+    });
+  }
+
+  private void deleteStepExecutionContexts(Date olderThan) {
+    getJdbcTemplate().update(getQuery("DELETE FROM %PREFIX%STEP_EXECUTION_CONTEXT WHERE STEP_EXECUTION_ID IN (SELECT STEP_EXECUTION_ID FROM %PREFIX%STEP_EXECUTION WHERE JOB_EXECUTION_ID IN (SELECT JOB_EXECUTION_ID FROM %PREFIX%JOB_EXECUTION WHERE CREATE_TIME < ?))"),
+            olderThan);
+  }
+
+  private void deleteStepExecutions(Date olderThan) {
+    getJdbcTemplate().update(getQuery("DELETE FROM %PREFIX%STEP_EXECUTION WHERE JOB_EXECUTION_ID IN (SELECT JOB_EXECUTION_ID FROM %PREFIX%JOB_EXECUTION WHERE CREATE_TIME < ?)"),
+            olderThan);
+  }
+
+  private void deleteJobExecutionParams(Date olderThan) {
+    getJdbcTemplate().update(getQuery("DELETE FROM %PREFIX%JOB_EXECUTION_PARAMS WHERE JOB_EXECUTION_ID IN (SELECT JOB_EXECUTION_ID FROM %PREFIX%JOB_EXECUTION WHERE CREATE_TIME < ?)"),
+            olderThan);
+  }
+
+  private void deleteJobExecutionContexts(Date olderThan) {
+    getJdbcTemplate().update(getQuery("DELETE FROM %PREFIX%JOB_EXECUTION_CONTEXT WHERE JOB_EXECUTION_ID IN (SELECT JOB_EXECUTION_ID FROM  %PREFIX%JOB_EXECUTION WHERE CREATE_TIME < ?)"),
+            olderThan);
+  }
+
+}
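
The DAO removes the child tables before the executions themselves, all inside a single transaction, so a partial purge cannot leave orphaned rows. A usage sketch (the 30-day retention window is illustrative):

    // Purge Spring Batch metadata for executions created more than 30 days ago.
    infraJobExecutionDao.deleteJobExecutions(OffsetDateTime.now().minusDays(30));
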
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java
index 79406d0..7be152f 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java
@@ -18,23 +18,15 @@
  */
 package org.apache.ambari.infra.job;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.springframework.batch.core.JobParameters;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
 import java.util.Optional;
 
-public abstract class JobProperties<T extends JobProperties<T>> {
+import org.springframework.batch.core.JobParameters;
+
+public abstract class JobProperties<TParameters extends Validatable> {
 
   private SchedulingProperties scheduling;
-  private final Class<T> clazz;
   private boolean enabled;
 
-  protected JobProperties(Class<T> clazz) {
-    this.clazz = clazz;
-  }
-
   public SchedulingProperties getScheduling() {
     return scheduling;
   }
@@ -49,23 +41,11 @@ public abstract class JobProperties<T extends JobProperties<T>> {
     this.scheduling = scheduling;
   }
 
-  public T deepCopy() {
-    try {
-      ObjectMapper objectMapper = new ObjectMapper();
-      String json = objectMapper.writeValueAsString(this);
-      return objectMapper.readValue(json, clazz);
-    } catch (IOException e) {
-      throw new UncheckedIOException(e);
-    }
-  }
-
-  public abstract void apply(JobParameters jobParameters);
-
-  public abstract void validate();
+  public abstract TParameters merge(JobParameters jobParameters);
 
   public void validate(String jobName) {
     try {
-      validate();
+      merge(new JobParameters()).validate();
     }
     catch (Exception ex) {
       throw new JobConfigurationException(String.format("Configuration of job %s is invalid: %s!", jobName, ex.getMessage()), ex);
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobPropertiesHolder.java
similarity index 66%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobPropertiesHolder.java
index 094e797..67cdafa 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobPropertiesHolder.java
@@ -18,32 +18,27 @@
  */
 package org.apache.ambari.infra.job;
 
+import static org.apache.ambari.infra.job.JobsPropertyMap.PARAMETERS_CONTEXT_KEY;
+
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobExecutionListener;
 
-import java.util.Map;
-
-public class JobsPropertyMap<T extends JobProperties<T>> implements JobExecutionListener {
+public class JobPropertiesHolder<T extends Validatable>
+        implements JobExecutionListener {
 
-  private final Map<String, T> propertyMap;
+  private final JobProperties<T> defaultProperties;
 
-  public JobsPropertyMap(Map<String, T> propertyMap) {
-    this.propertyMap = propertyMap;
+  public JobPropertiesHolder(JobProperties<T> defaultProperties) {
+    this.defaultProperties = defaultProperties;
   }
 
   @Override
   public void beforeJob(JobExecution jobExecution) {
     try {
-      String jobName = jobExecution.getJobInstance().getJobName();
-      T defaultProperties = propertyMap.get(jobName);
-      if (defaultProperties == null)
-        throw new UnsupportedOperationException("Properties not found for job " + jobName);
-
-      T properties = defaultProperties.deepCopy();
-      properties.apply(jobExecution.getJobParameters());
-      properties.validate(jobName);
-      jobExecution.getExecutionContext().put("jobProperties", properties);
+      T parameters = defaultProperties.merge(jobExecution.getJobParameters());
+      parameters.validate();
+      jobExecution.getExecutionContext().put(PARAMETERS_CONTEXT_KEY, parameters);
     }
     catch (UnsupportedOperationException | IllegalArgumentException ex) {
       jobExecution.stop();
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
index 324c0b3..4edfae9 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
@@ -18,9 +18,14 @@
  */
 package org.apache.ambari.infra.job;
 
+import java.util.Date;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
 import org.apache.ambari.infra.manager.Jobs;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobParametersBuilder;
@@ -33,17 +38,9 @@ import org.springframework.batch.core.repository.JobRestartException;
 import org.springframework.scheduling.TaskScheduler;
 import org.springframework.scheduling.support.CronTrigger;
 
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.time.Duration;
-import java.time.OffsetDateTime;
-
-import static org.apache.ambari.infra.job.archive.FileNameSuffixFormatter.SOLR_DATETIME_FORMATTER;
-import static org.apache.commons.lang.StringUtils.isBlank;
-
 @Named
 public class JobScheduler {
-  private static final Logger LOG = LoggerFactory.getLogger(JobScheduler.class);
+  private static final Logger logger = LogManager.getLogger(JobScheduler.class);
 
   private final TaskScheduler scheduler;
   private final Jobs jobs;
@@ -61,26 +58,26 @@ public class JobScheduler {
       throw new RuntimeException(e);
     }
 
-    scheduler.schedule(() -> launchJob(jobName, schedulingProperties.getIntervalEndDelta()), new CronTrigger(schedulingProperties.getCron()));
-    LOG.info("Job {} scheduled for running. Cron: {}", jobName, schedulingProperties.getCron());
+    scheduler.schedule(() -> launchJob(jobName), new CronTrigger(schedulingProperties.getCron()));
+    logger.info("Job {} scheduled for running. Cron: {}", jobName, schedulingProperties.getCron());
   }
 
   private void restartIfFailed(JobExecution jobExecution) {
-    if (jobExecution.getExitStatus() == ExitStatus.FAILED) {
-      try {
+    try {
+      if (ExitStatus.FAILED.compareTo(jobExecution.getExitStatus()) == 0) {
         jobs.restart(jobExecution.getId());
-      } catch (JobInstanceAlreadyCompleteException | NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException | JobParametersInvalidException | NoSuchJobExecutionException e) {
-        throw new RuntimeException(e);
+      } else if (ExitStatus.UNKNOWN.compareTo(jobExecution.getExitStatus()) == 0) {
+        jobs.stopAndAbandon(jobExecution.getId());
       }
+    } catch (JobInstanceAlreadyCompleteException | NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException | JobParametersInvalidException | NoSuchJobExecutionException e) {
+      throw new RuntimeException(e);
     }
   }
 
-  private void launchJob(String jobName, String endDelta) {
+  private void launchJob(String jobName) {
     try {
       JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
-      if (!isBlank(endDelta))
-        jobParametersBuilder.addString("end", SOLR_DATETIME_FORMATTER.format(OffsetDateTime.now().minus(Duration.parse(endDelta))));
-
+      jobParametersBuilder.addDate("scheduledLaunchAt", new Date());
       jobs.launchJob(jobName, jobParametersBuilder.toJobParameters());
     } catch (JobParametersInvalidException | NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException e) {
       throw new RuntimeException(e);
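
Each scheduled launch now adds a fresh scheduledLaunchAt date parameter, so consecutive cron firings create distinct job instances instead of colliding with an already-completed one; a FAILED execution found at scheduling time is restarted, while an UNKNOWN one is stopped and abandoned. A hedged wiring sketch (the schedule method signature and cron value are assumptions based on this hunk):

    SchedulingProperties nightly = new SchedulingProperties();
    nightly.setEnabled(true);
    nightly.setCron("0 0 1 * * ?"); // every day at 01:00
    jobScheduler.schedule("archive_service_logs", nightly);
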
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java
index 094e797..0eb5908 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java
@@ -18,38 +18,29 @@
  */
 package org.apache.ambari.infra.job;
 
-import org.springframework.batch.core.ExitStatus;
+import java.util.Map;
+
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobExecutionListener;
 
-import java.util.Map;
+public class JobsPropertyMap<TProperties extends JobProperties<TParameters>, TParameters extends Validatable>
+        implements JobExecutionListener {
 
-public class JobsPropertyMap<T extends JobProperties<T>> implements JobExecutionListener {
+  public static final String PARAMETERS_CONTEXT_KEY = "jobParameters";
+  private final Map<String, TProperties> propertyMap;
 
-  private final Map<String, T> propertyMap;
-
-  public JobsPropertyMap(Map<String, T> propertyMap) {
+  public JobsPropertyMap(Map<String, TProperties> propertyMap) {
     this.propertyMap = propertyMap;
   }
 
   @Override
   public void beforeJob(JobExecution jobExecution) {
-    try {
-      String jobName = jobExecution.getJobInstance().getJobName();
-      T defaultProperties = propertyMap.get(jobName);
-      if (defaultProperties == null)
-        throw new UnsupportedOperationException("Properties not found for job " + jobName);
-
-      T properties = defaultProperties.deepCopy();
-      properties.apply(jobExecution.getJobParameters());
-      properties.validate(jobName);
-      jobExecution.getExecutionContext().put("jobProperties", properties);
-    }
-    catch (UnsupportedOperationException | IllegalArgumentException ex) {
-      jobExecution.stop();
-      jobExecution.setExitStatus(new ExitStatus(ExitStatus.FAILED.getExitCode(), ex.getMessage()));
-      throw ex;
-    }
+    String jobName = jobExecution.getJobInstance().getJobName();
+    TProperties defaultProperties = propertyMap.get(jobName);
+    if (defaultProperties == null)
+      throw new UnsupportedOperationException("Properties not found for job " + jobName);
+
+    new JobPropertiesHolder<>(defaultProperties).beforeJob(jobExecution);
   }
 
   @Override
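
JobsPropertyMap now only resolves the per-job defaults and delegates merging
and validation to JobPropertiesHolder. A sketch of invoking it directly, for
example from a test (the defaults map and the execution are assumed to exist):

    import java.util.Map;
    import org.springframework.batch.core.JobExecution;

    public class PropertyMapSketch {
      public static void applyDefaults(JobExecution execution,
                                       Map<String, ArchivingProperties> defaults) {
        // Throws UnsupportedOperationException when the job name has no entry,
        // so every schedulable job must be present in the map.
        new JobsPropertyMap<ArchivingProperties, ArchivingProperties>(defaults)
                .beforeJob(execution);
      }
    }
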
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java
index af81b4f..2f18c55 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java
@@ -21,7 +21,6 @@ package org.apache.ambari.infra.job;
 public class SchedulingProperties {
   private boolean enabled = false;
   private String cron;
-  private String intervalEndDelta;
 
   public boolean isEnabled() {
     return enabled;
@@ -38,12 +37,4 @@ public class SchedulingProperties {
   public void setCron(String cron) {
     this.cron = cron;
   }
-
-  public String getIntervalEndDelta() {
-    return intervalEndDelta;
-  }
-
-  public void setIntervalEndDelta(String intervalEndDelta) {
-    this.intervalEndDelta = intervalEndDelta;
-  }
 }
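
With intervalEndDelta gone, scheduling only decides when a job fires; the
exported interval comes from the job's own start/end/ttl parameters. A sketch
of the remaining knobs (the cron value is just an example):

    public class SchedulingSketch {
      public static SchedulingProperties nightlyAtTwo() {
        SchedulingProperties scheduling = new SchedulingProperties();
        scheduling.setEnabled(true);
        // Spring cron expression: second minute hour day month weekday.
        scheduling.setCron("0 0 2 * * ?");
        return scheduling;
      }
    }
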
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
index 3ac5b05..5569cf0 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
@@ -18,16 +18,21 @@
  */
 package org.apache.ambari.infra.job;
 
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.io.UncheckedIOException;
+import java.net.InetSocketAddress;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.zookeeper.client.ConnectStringParser;
 
 public abstract class SolrDAOBase {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrDAOBase.class);
+  private static final Logger logger = LogManager.getLogger(SolrDAOBase.class);
 
   private final String zooKeeperConnectionString;
   private final String defaultCollection;
@@ -40,14 +45,14 @@ public abstract class SolrDAOBase {
   protected void delete(String deleteQueryText) {
     try (CloudSolrClient client = createClient()) {
       try {
-        LOG.info("Executing solr delete by query {}", deleteQueryText);
+        logger.info("Executing solr delete by query {}", deleteQueryText);
         client.deleteByQuery(deleteQueryText);
         client.commit();
       } catch (Exception e) {
         try {
           client.rollback();
         } catch (SolrServerException e1) {
-          LOG.warn("Unable to rollback after solr delete operation failure.", e1);
+          logger.warn("Unable to rollback after solr delete operation failure.", e1);
         }
         throw new RuntimeException(e);
       }
@@ -57,7 +62,12 @@ public abstract class SolrDAOBase {
   }
 
   protected CloudSolrClient createClient() {
-    CloudSolrClient client = new CloudSolrClient.Builder().withZkHost(zooKeeperConnectionString).build();
+    ConnectStringParser connectStringParser = new ConnectStringParser(zooKeeperConnectionString);
+    List<String> zkHosts = connectStringParser.getServerAddresses().stream()
+            .map(InetSocketAddress::toString)
+            .collect(Collectors.toList());
+    CloudSolrClient client = new CloudSolrClient.Builder(
+            zkHosts, Optional.ofNullable(connectStringParser.getChrootPath())).build();
     client.setDefaultCollection(defaultCollection);
     return client;
   }
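
The new builder call supports chroot-style ZooKeeper connection strings. A
sketch showing what ConnectStringParser extracts (host names are examples; the
exact host:port string form depends on the ZooKeeper version in use):

    import java.net.InetSocketAddress;
    import java.util.List;
    import java.util.stream.Collectors;
    import org.apache.zookeeper.client.ConnectStringParser;

    public class ZkConnectStringSketch {
      public static void main(String[] args) {
        ConnectStringParser parser =
                new ConnectStringParser("zk1:2181,zk2:2181/solr");
        List<String> hosts = parser.getServerAddresses().stream()
                .map(InetSocketAddress::toString)
                .collect(Collectors.toList());
        // e.g. hosts -> [zk1:2181, zk2:2181], chroot -> /solr
        System.out.println(hosts + " chroot=" + parser.getChrootPath());
      }
    }
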
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/Validatable.java
similarity index 80%
rename from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java
rename to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/Validatable.java
index ef9d539..5c04406 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/Validatable.java
@@ -16,10 +16,8 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.infra.job.archive;
+package org.apache.ambari.infra.job;
 
-public class S3AccessCsvFormatException extends RuntimeException {
-  public S3AccessCsvFormatException(String message) {
-    super(message);
-  }
+public interface Validatable {
+  void validate();
 }
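
The interface is deliberately tiny; any parameter object can opt in. A
hypothetical implementation, shown only to illustrate the contract:

    public class RetentionParameters implements Validatable {
      private int days;   // hypothetical field

      public void setDays(int days) { this.days = days; }

      @Override
      public void validate() {
        if (days <= 0)
          throw new IllegalArgumentException("The property days must be greater than 0!");
      }
    }
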
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
index 3df18b6..3f89ec8 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
@@ -18,19 +18,19 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.File;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
 public abstract class AbstractFileAction implements FileAction {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractFileAction.class);
+  private static final Logger logger = LogManager.getLogger(AbstractFileAction.class);
 
   @Override
   public File perform(File inputFile) {
     File outputFile =  onPerform(inputFile);
     if (!inputFile.delete())
-      LOG.warn("File {} was not deleted. Exists: {}", inputFile.getAbsolutePath(), inputFile.exists());
+      logger.warn("File {} was not deleted. Exists: {}", inputFile.getAbsolutePath(), inputFile.exists());
     return outputFile;
   }
 
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
similarity index 50%
rename from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java
rename to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
index b26da36..39ac398 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
@@ -18,18 +18,29 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.ambari.infra.job.JobProperties;
-import org.springframework.batch.core.JobParameters;
-
-import java.util.Optional;
-
 import static java.util.Objects.requireNonNull;
 import static org.apache.ambari.infra.job.archive.ExportDestination.HDFS;
 import static org.apache.ambari.infra.job.archive.ExportDestination.LOCAL;
-import static org.apache.ambari.infra.job.archive.ExportDestination.S3;
+import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
+import static org.apache.ambari.infra.json.StringToFsPermissionConverter.toFsPermission;
 import static org.apache.commons.lang.StringUtils.isBlank;
 
-public class DocumentArchivingProperties extends JobProperties<DocumentArchivingProperties> {
+import java.time.Duration;
+import java.util.Optional;
+
+import org.apache.ambari.infra.job.JobProperties;
+import org.apache.ambari.infra.job.Validatable;
+import org.apache.ambari.infra.json.DurationToStringConverter;
+import org.apache.ambari.infra.json.FsPermissionToStringConverter;
+import org.apache.ambari.infra.json.StringToDurationConverter;
+import org.apache.ambari.infra.json.StringToFsPermissionConverter;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.springframework.batch.core.JobParameters;
+
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+
+public class ArchivingProperties extends JobProperties<ArchivingProperties> implements Validatable {
   private int readBlockSize;
   private int writeBlockSize;
   private ExportDestination destination;
@@ -37,17 +48,18 @@ public class DocumentArchivingProperties extends JobProperties<DocumentArchiving
   private String fileNameSuffixColumn;
   private String fileNameSuffixDateFormat;
   private SolrProperties solr;
-  private String s3AccessFile;
-  private String s3KeyPrefix;
-  private String s3BucketName;
-  private String s3Endpoint;
-
   private String hdfsEndpoint;
   private String hdfsDestinationDirectory;
-
-  public DocumentArchivingProperties() {
-    super(DocumentArchivingProperties.class);
-  }
+  @JsonSerialize(converter = FsPermissionToStringConverter.class)
+  @JsonDeserialize(converter = StringToFsPermissionConverter.class)
+  private FsPermission hdfsFilePermission;
+  private String hdfsKerberosPrincipal;
+  private String hdfsKerberosKeytabPath;
+  private String start;
+  private String end;
+  @JsonSerialize(converter = DurationToStringConverter.class)
+  @JsonDeserialize(converter = StringToDurationConverter.class)
+  private Duration ttl;
 
   public int getReadBlockSize() {
     return readBlockSize;
@@ -101,97 +113,92 @@ public class DocumentArchivingProperties extends JobProperties<DocumentArchiving
     return solr;
   }
 
-  public void setSolr(SolrProperties query) {
-    this.solr = query;
+  public void setSolr(SolrProperties solr) {
+    this.solr = solr;
   }
 
-  public String getS3AccessFile() {
-    return s3AccessFile;
+  public String getHdfsEndpoint() {
+    return hdfsEndpoint;
+  }
+
+  public void setHdfsEndpoint(String hdfsEndpoint) {
+    this.hdfsEndpoint = hdfsEndpoint;
+  }
+
+  public String getHdfsDestinationDirectory() {
+    return hdfsDestinationDirectory;
   }
 
-  public void setS3AccessFile(String s3AccessFile) {
-    this.s3AccessFile = s3AccessFile;
+  public void setHdfsDestinationDirectory(String hdfsDestinationDirectory) {
+    this.hdfsDestinationDirectory = hdfsDestinationDirectory;
   }
 
-  public String getS3KeyPrefix() {
-    return s3KeyPrefix;
+  public FsPermission getHdfsFilePermission() {
+    return hdfsFilePermission;
   }
 
-  public void setS3KeyPrefix(String s3KeyPrefix) {
-    this.s3KeyPrefix = s3KeyPrefix;
+  public void setHdfsFilePermission(FsPermission hdfsFilePermission) {
+    this.hdfsFilePermission = hdfsFilePermission;
   }
 
-  public String getS3BucketName() {
-    return s3BucketName;
+  public String getHdfsKerberosPrincipal() {
+    return hdfsKerberosPrincipal;
   }
 
-  public void setS3BucketName(String s3BucketName) {
-    this.s3BucketName = s3BucketName;
+  public void setHdfsKerberosPrincipal(String hdfsKerberosPrincipal) {
+    this.hdfsKerberosPrincipal = hdfsKerberosPrincipal;
   }
 
-  public String getS3Endpoint() {
-    return s3Endpoint;
+  public String getHdfsKerberosKeytabPath() {
+    return hdfsKerberosKeytabPath;
   }
 
-  public void setS3Endpoint(String s3Endpoint) {
-    this.s3Endpoint = s3Endpoint;
+  public void setHdfsKerberosKeytabPath(String hdfsKerberosKeytabPath) {
+    this.hdfsKerberosKeytabPath = hdfsKerberosKeytabPath;
   }
 
-  public Optional<S3Properties> s3Properties() {
-    if (isBlank(s3BucketName))
+  public Optional<HdfsProperties> hdfsProperties() {
+    if (isBlank(hdfsDestinationDirectory))
       return Optional.empty();
 
-    return Optional.of(new S3Properties(
-            s3AccessFile,
-            s3KeyPrefix,
-            s3BucketName,
-            s3Endpoint));
+    return Optional.of(new HdfsProperties(
+            hdfsEndpoint,
+            hdfsDestinationDirectory,
+            hdfsFilePermission,
+            hdfsKerberosPrincipal,
+            hdfsKerberosKeytabPath));
   }
 
-  public String getHdfsEndpoint() {
-    return hdfsEndpoint;
+  public String getStart() {
+    return start;
   }
 
-  public void setHdfsEndpoint(String hdfsEndpoint) {
-    this.hdfsEndpoint = hdfsEndpoint;
+  public void setStart(String start) {
+    this.start = start;
   }
 
-  public String getHdfsDestinationDirectory() {
-    return hdfsDestinationDirectory;
+  public String getEnd() {
+    return end;
   }
 
-  public void setHdfsDestinationDirectory(String hdfsDestinationDirectory) {
-    this.hdfsDestinationDirectory = hdfsDestinationDirectory;
+  public void setEnd(String end) {
+    this.end = end;
   }
 
-  @Override
-  public void apply(JobParameters jobParameters) {
-    readBlockSize = getIntJobParameter(jobParameters, "readBlockSize", readBlockSize);
-    writeBlockSize = getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize);
-    destination = ExportDestination.valueOf(jobParameters.getString("destination", destination.name()));
-    localDestinationDirectory = jobParameters.getString("localDestinationDirectory", localDestinationDirectory);
-    s3AccessFile = jobParameters.getString("s3AccessFile", s3AccessFile);
-    s3BucketName = jobParameters.getString("s3BucketName", s3BucketName);
-    s3KeyPrefix = jobParameters.getString("s3KeyPrefix", s3KeyPrefix);
-    s3Endpoint = jobParameters.getString("s3Endpoint", s3Endpoint);
-    hdfsEndpoint = jobParameters.getString("hdfsEndpoint", hdfsEndpoint);
-    hdfsDestinationDirectory = jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory);
-    solr.apply(jobParameters);
+  public Duration getTtl() {
+    return ttl;
   }
 
-  private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) {
-    String valueText = jobParameters.getString(parameterName);
-    if (isBlank(valueText))
-      return defaultValue;
-    return Integer.parseInt(valueText);
+  public void setTtl(Duration ttl) {
+    this.ttl = ttl;
   }
 
   @Override
   public void validate() {
-    if (readBlockSize == 0)
+    if (readBlockSize <= 0)
       throw new IllegalArgumentException("The property readBlockSize must be greater than 0!");
 
-    if (writeBlockSize == 0)
+    if (writeBlockSize <= 0)
       throw new IllegalArgumentException("The property writeBlockSize must be greater than 0!");
 
     if (isBlank(fileNameSuffixColumn)) {
@@ -206,22 +213,41 @@ public class DocumentArchivingProperties extends JobProperties<DocumentArchiving
                   "The property localDestinationDirectory can not be null or empty string when destination is set to %s!", LOCAL.name()));
         break;
 
-      case S3:
-        s3Properties()
-                .orElseThrow(() -> new IllegalArgumentException("S3 related properties must be set if the destination is " + S3.name()))
-                .validate();
-        break;
-
       case HDFS:
-        if (isBlank(hdfsEndpoint))
-          throw new IllegalArgumentException(String.format(
-                  "The property hdfsEndpoint can not be null or empty string when destination is set to %s!", HDFS.name()));
-        if (isBlank(hdfsDestinationDirectory))
-          throw new IllegalArgumentException(String.format(
-                  "The property hdfsDestinationDirectory can not be null or empty string when destination is set to %s!", HDFS.name()));
+        hdfsProperties()
+                .orElseThrow(() -> new IllegalArgumentException("HDFS related properties must be set if the destination is " + HDFS.name()))
+                .validate();
     }
 
     requireNonNull(solr, "No solr query was specified for archiving job!");
     solr.validate();
   }
+
+  @Override
+  public ArchivingProperties merge(JobParameters jobParameters) {
+    ArchivingProperties archivingProperties = new ArchivingProperties();
+    archivingProperties.setReadBlockSize(getIntJobParameter(jobParameters, "readBlockSize", readBlockSize));
+    archivingProperties.setWriteBlockSize(getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize));
+    archivingProperties.setDestination(ExportDestination.valueOf(jobParameters.getString("destination", destination.name())));
+    archivingProperties.setLocalDestinationDirectory(jobParameters.getString("localDestinationDirectory", localDestinationDirectory));
+    archivingProperties.setFileNameSuffixColumn(jobParameters.getString("fileNameSuffixColumn", fileNameSuffixColumn));
+    archivingProperties.setFileNameSuffixDateFormat(jobParameters.getString("fileNameSuffixDateFormat", fileNameSuffixDateFormat));
+    archivingProperties.setHdfsEndpoint(jobParameters.getString("hdfsEndpoint", hdfsEndpoint));
+    archivingProperties.setHdfsDestinationDirectory(jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory));
+    archivingProperties.setHdfsFilePermission(toFsPermission(jobParameters.getString("hdfsFilePermission", FsPermissionToStringConverter.toString(hdfsFilePermission))));
+    archivingProperties.setHdfsKerberosPrincipal(jobParameters.getString("hdfsKerberosPrincipal", hdfsKerberosPrincipal));
+    archivingProperties.setHdfsKerberosKeytabPath(jobParameters.getString("hdfsKerberosKeytabPath", hdfsKerberosKeytabPath));
+    archivingProperties.setSolr(solr.merge(jobParameters));
+    archivingProperties.setStart(jobParameters.getString("start"));
+    archivingProperties.setEnd(jobParameters.getString("end"));
+    archivingProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return archivingProperties;
+  }
+
+  private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) {
+    String valueText = jobParameters.getString(parameterName);
+    if (isBlank(valueText))
+      return defaultValue;
+    return Integer.parseInt(valueText);
+  }
 }
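
Unlike the old in-place apply(), merge() leaves the configured defaults
untouched and returns the effective parameters. A sketch (parameter values are
examples; ttl is assumed to be an ISO-8601 duration such as P14D):

    import org.springframework.batch.core.JobParametersBuilder;

    public class MergeSketch {
      public static ArchivingProperties effective(ArchivingProperties defaults) {
        return defaults.merge(new JobParametersBuilder()
                .addString("writeBlockSize", "5000") // overrides the YAML default
                .addString("ttl", "P14D")            // everything else falls back
                .toJobParameters());
      }
    }
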
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/BZip2Compressor.java
similarity index 52%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/BZip2Compressor.java
index 0f7b99f..ac1ca6b 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/BZip2Compressor.java
@@ -18,38 +18,27 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.UncheckedIOException;
 
-public class HdfsUploader extends AbstractFileAction {
-
-  private final Configuration configuration;
-  private final Path destinationDirectory;
-
-  public HdfsUploader(Configuration configuration, Path destinationDirectory) {
-    this.destinationDirectory = destinationDirectory;
-    this.configuration = configuration;
-  }
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
+import org.apache.commons.io.IOUtils;
 
+public class BZip2Compressor extends AbstractFileAction {
   @Override
   protected File onPerform(File inputFile) {
-    try (FileSystem fileSystem = FileSystem.get(configuration)) {
-      Path destination = new Path(destinationDirectory, inputFile.getName());
-      if (fileSystem.exists(destination)) {
-        throw new UnsupportedOperationException(String.format("File '%s' already exists!", destination));
+    File bz2File = new File(inputFile.getParent(), inputFile.getName() + ".bz2");
+    try (BZip2CompressorOutputStream bZip2CompressorOutputStream = new BZip2CompressorOutputStream(new FileOutputStream(bz2File))) {
+      try (FileInputStream fileInputStream = new FileInputStream(inputFile)) {
+        IOUtils.copy(fileInputStream, bZip2CompressorOutputStream);
       }
-
-      fileSystem.copyFromLocalFile(new Path(inputFile.getAbsolutePath()), destination);
-
-      return inputFile;
     }
-    catch (IOException e) {
-      throw new UncheckedIOException(e);
+    catch (IOException ex) {
+      throw new UncheckedIOException(ex);
     }
+    return bz2File;
   }
 }
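
BZip2Compressor replaces the previous tar.gz step and, via
AbstractFileAction.perform(), removes the uncompressed input afterwards. A
minimal usage sketch (the path is an example):

    import java.io.File;

    public class CompressSketch {
      public static void main(String[] args) {
        File compressed = new BZip2Compressor()
                .perform(new File("/tmp/audit_logs.json"));
        System.out.println(compressed);  // /tmp/audit_logs.json.bz2
      }
    }
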
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
index 5ff9587..acf19c0 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
@@ -18,36 +18,37 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
+import static java.util.Collections.unmodifiableMap;
 
 import java.util.HashMap;
 import java.util.Map;
 
-import static java.util.Collections.unmodifiableMap;
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
 
 public class Document {
-  private final Map<String, String> fieldMap;
+  private final Map<String, Object> fieldMap;
 
   private Document() {
     fieldMap = new HashMap<>();
   }
 
-  public Document(Map<String, String> fieldMap) {
+  public Document(Map<String, Object> fieldMap) {
     this.fieldMap = unmodifiableMap(fieldMap);
   }
 
-  public String get(String key) {
-    return fieldMap.get(key);
+  public String getString(String key) {
+    Object value = fieldMap.get(key);
+    return value == null ? null : value.toString();
   }
 
   @JsonAnyGetter
-  public Map<String, String> getFieldMap() {
+  public Map<String, Object> getFieldMap() {
     return fieldMap;
   }
 
   @JsonAnySetter
-  private void put(String key, String value) {
+  private void put(String key, Object value) {
     fieldMap.put(key, value);
   }
 }
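
Switching the field map to Object keeps multi-valued Solr fields intact during
Jackson round-trips. A sketch (this assumes Jackson forces access to the
private constructor and the annotated private setter, as it does for annotated
members):

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class DocumentJsonSketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper json = new ObjectMapper();
        Document document = json.readValue(
                "{\"id\":\"1\",\"tags\":[\"a\",\"b\"]}", Document.class);
        System.out.println(document.getString("tags")); // [a, b]
      }
    }
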
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
index 8358dd0..22040a9 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
@@ -18,15 +18,21 @@
  */
 package org.apache.ambari.infra.job.archive;
 
+import static org.apache.ambari.infra.job.JobsPropertyMap.PARAMETERS_CONTEXT_KEY;
+import static org.apache.ambari.infra.job.archive.SolrQueryBuilder.computeEnd;
+import static org.apache.commons.lang.StringUtils.isBlank;
+
+import java.io.File;
+
+import javax.inject.Inject;
+
 import org.apache.ambari.infra.conf.InfraManagerDataConfig;
-import org.apache.ambari.infra.conf.security.PasswordStore;
 import org.apache.ambari.infra.job.AbstractJobsConfiguration;
 import org.apache.ambari.infra.job.JobContextRepository;
 import org.apache.ambari.infra.job.JobScheduler;
 import org.apache.ambari.infra.job.ObjectSource;
-import org.apache.hadoop.fs.Path;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.Step;
 import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
@@ -40,14 +46,9 @@ import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-import javax.inject.Inject;
-import java.io.File;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
 @Configuration
-public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<DocumentArchivingProperties> {
-  private static final Logger LOG = LoggerFactory.getLogger(DocumentArchivingConfiguration.class);
+public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<ArchivingProperties, ArchivingProperties> {
+  private static final Logger logger = LogManager.getLogger(DocumentArchivingConfiguration.class);
   private static final DocumentWiper NOT_DELETE = (firstDocument, lastDocument) -> { };
 
   private final StepBuilderFactory steps;
@@ -83,87 +84,80 @@ public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<Do
   @StepScope
   public DocumentExporter documentExporter(DocumentItemReader documentItemReader,
                                            @Value("#{stepExecution.jobExecution.jobId}") String jobId,
-                                           @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties,
+                                           @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
                                            InfraManagerDataConfig infraManagerDataConfig,
                                            @Value("#{jobParameters[end]}") String intervalEnd,
                                            DocumentWiper documentWiper,
-                                           JobContextRepository jobContextRepository,
-                                           PasswordStore passwordStore) {
+                                           JobContextRepository jobContextRepository) {
 
     File baseDir = new File(infraManagerDataConfig.getDataFolder(), "exporting");
-    CompositeFileAction fileAction = new CompositeFileAction(new TarGzCompressor());
-    switch (properties.getDestination()) {
-      case S3:
-        fileAction.add(new S3Uploader(
-                properties.s3Properties().orElseThrow(() -> new IllegalStateException("S3 properties are not provided!")),
-                passwordStore));
-        break;
+    CompositeFileAction fileAction = new CompositeFileAction(new BZip2Compressor());
+    switch (parameters.getDestination()) {
       case HDFS:
         org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
-        conf.set("fs.defaultFS", properties.getHdfsEndpoint());
-        fileAction.add(new HdfsUploader(conf, new Path(properties.getHdfsDestinationDirectory())));
+        fileAction.add(new HdfsUploader(conf,
+                parameters.hdfsProperties().orElseThrow(() -> new IllegalStateException("HDFS properties are not provided!"))));
         break;
       case LOCAL:
-        baseDir = new File(properties.getLocalDestinationDirectory());
+        baseDir = new File(parameters.getLocalDestinationDirectory());
         break;
     }
 
-    FileNameSuffixFormatter fileNameSuffixFormatter = FileNameSuffixFormatter.from(properties);
+    FileNameSuffixFormatter fileNameSuffixFormatter = FileNameSuffixFormatter.from(parameters);
     LocalItemWriterListener itemWriterListener = new LocalItemWriterListener(fileAction, documentWiper);
     File destinationDirectory = new File(
             baseDir,
             String.format("%s_%s_%s",
-                    properties.getSolr().getCollection(),
+                    parameters.getSolr().getCollection(),
                     jobId,
                     isBlank(intervalEnd) ? "" : fileNameSuffixFormatter.format(intervalEnd)));
-    LOG.info("Destination directory path={}", destinationDirectory);
+    logger.info("Destination directory path={}", destinationDirectory);
     if (!destinationDirectory.exists()) {
       if (!destinationDirectory.mkdirs()) {
-        LOG.warn("Unable to create directory {}", destinationDirectory);
+        logger.warn("Unable to create directory {}", destinationDirectory);
       }
     }
 
     return new DocumentExporter(
             documentItemReader,
             firstDocument -> new LocalDocumentItemWriter(
-                    outFile(properties.getSolr().getCollection(), destinationDirectory, fileNameSuffixFormatter.format(firstDocument)), itemWriterListener),
-            properties.getWriteBlockSize(), jobContextRepository);
+                    outFile(parameters.getSolr().getCollection(), destinationDirectory, fileNameSuffixFormatter.format(firstDocument)), itemWriterListener),
+            parameters.getWriteBlockSize(), jobContextRepository);
   }
 
   @Bean
   @StepScope
-  public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties,
+  public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
                                      SolrDAO solrDAO) {
-    if (isBlank(properties.getSolr().getDeleteQueryText()))
+    if (isBlank(parameters.getSolr().getDeleteQueryText()))
       return NOT_DELETE;
     return solrDAO;
   }
 
   @Bean
   @StepScope
-  public SolrDAO solrDAO(@Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties) {
-    return new SolrDAO(properties.getSolr());
+  public SolrDAO solrDAO(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters) {
+    return new SolrDAO(parameters.getSolr());
   }
 
   private File outFile(String collection, File directoryPath, String suffix) {
     File file = new File(directoryPath, String.format("%s_-_%s.json", collection, suffix));
-    LOG.info("Exporting to temp file {}", file.getAbsolutePath());
+    logger.info("Exporting to temp file {}", file.getAbsolutePath());
     return file;
   }
 
   @Bean
   @StepScope
   public DocumentItemReader reader(ObjectSource<Document> documentSource,
-                                   @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties) {
+                                   @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties properties) {
     return new DocumentItemReader(documentSource, properties.getReadBlockSize());
   }
 
   @Bean
   @StepScope
-  public ObjectSource<Document> logSource(@Value("#{jobParameters[start]}") String start,
-                                          @Value("#{jobParameters[end]}") String end,
-                                          SolrDAO solrDAO) {
+  public ObjectSource<Document> documentSource(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
+                                               SolrDAO solrDAO) {
 
-    return new SolrDocumentSource(solrDAO, start, end);
+    return new SolrDocumentSource(solrDAO, parameters.getStart(), computeEnd(parameters.getEnd(), parameters.getTtl()));
   }
 }
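
Step-scoped beans all read the merged parameters from the execution context
under PARAMETERS_CONTEXT_KEY ("jobParameters"). A sketch of the lookup pattern
used above (the bean itself is illustrative):

    import org.springframework.batch.core.configuration.annotation.StepScope;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class ParametersLookupSketch {
      @Bean
      @StepScope
      public FileNameSuffixFormatter suffixFormatter(
              @Value("#{stepExecution.jobExecution.executionContext.get('jobParameters')}")
              ArchivingProperties parameters) {
        return FileNameSuffixFormatter.from(parameters);
      }
    }
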
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
index a009031..253fc40 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
@@ -18,21 +18,21 @@
  */
 package org.apache.ambari.infra.job.archive;
 
+import java.util.Map;
+
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.Map;
-
 @Configuration
 @ConfigurationProperties(prefix = "infra-manager.jobs")
 public class DocumentArchivingPropertyMap {
-  private Map<String, DocumentArchivingProperties> solrDataArchiving;
+  private Map<String, ArchivingProperties> solrDataArchiving;
 
-  public Map<String, DocumentArchivingProperties> getSolrDataArchiving() {
+  public Map<String, ArchivingProperties> getSolrDataArchiving() {
     return solrDataArchiving;
   }
 
-  public void setSolrDataArchiving(Map<String, DocumentArchivingProperties> solrDataArchiving) {
+  public void setSolrDataArchiving(Map<String, ArchivingProperties> solrDataArchiving) {
     this.solrDataArchiving = solrDataArchiving;
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
index d87fdea..8c4a067 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
@@ -19,9 +19,8 @@
 package org.apache.ambari.infra.job.archive;
 
 import org.apache.ambari.infra.job.JobContextRepository;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.BatchStatus;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.StepContribution;
 import org.springframework.batch.core.StepExecution;
@@ -31,10 +30,11 @@ import org.springframework.batch.core.step.tasklet.Tasklet;
 import org.springframework.batch.item.ExecutionContext;
 import org.springframework.batch.item.ItemStreamReader;
 import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.lang.NonNull;
 
 public class DocumentExporter implements Tasklet, StepExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DocumentExporter.class);
+  private static final Logger logger = LogManager.getLogger(DocumentExporter.class);
 
   private boolean complete = false;
   private final ItemStreamReader<Document> documentReader;
@@ -50,12 +50,12 @@ public class DocumentExporter implements Tasklet, StepExecutionListener {
   }
 
   @Override
-  public void beforeStep(StepExecution stepExecution) {
+  public void beforeStep(@NonNull StepExecution stepExecution) {
 
   }
 
   @Override
-  public ExitStatus afterStep(StepExecution stepExecution) {
+  public ExitStatus afterStep(@NonNull StepExecution stepExecution) {
     if (complete) {
       return ExitStatus.COMPLETED;
     }
@@ -65,7 +65,7 @@ public class DocumentExporter implements Tasklet, StepExecutionListener {
   }
 
   @Override
-  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+  public RepeatStatus execute(@NonNull StepContribution contribution, @NonNull ChunkContext chunkContext) throws Exception {
     StepExecution stepExecution = chunkContext.getStepContext().getStepExecution();
     ExecutionContext executionContext = stepExecution.getExecutionContext();
     documentReader.open(executionContext);
@@ -77,8 +77,8 @@ public class DocumentExporter implements Tasklet, StepExecutionListener {
       while ((document = documentReader.read()) != null) {
         if (writer != null && writtenCount >= writeBlockSize) {
           stepExecution = jobContextRepository.getStepExecution(stepExecution.getJobExecutionId(), stepExecution.getId());
-          if (stepExecution.getJobExecution().getStatus() == BatchStatus.STOPPING) {
-            LOG.info("Received stop signal.");
+          if (stepExecution.isTerminateOnly()) {
+            logger.info("Received stop signal.");
             writer.revert();
             writer = null;
             return RepeatStatus.CONTINUABLE;
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java
index a143e4c..703d7fb 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java
@@ -20,6 +20,5 @@ package org.apache.ambari.infra.job.archive;
 
 public enum ExportDestination {
   LOCAL,
-  HDFS,
-  S3
+  HDFS
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
index f9016e6..65b518f 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
@@ -18,17 +18,17 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import java.time.OffsetDateTime;
-import java.time.format.DateTimeFormatter;
-
 import static java.util.Objects.requireNonNull;
 import static org.apache.ambari.infra.job.archive.SolrDocumentIterator.SOLR_DATE_FORMAT_TEXT;
 import static org.apache.commons.lang.StringUtils.isBlank;
 
+import java.time.OffsetDateTime;
+import java.time.format.DateTimeFormatter;
+
 public class FileNameSuffixFormatter {
   public static final DateTimeFormatter SOLR_DATETIME_FORMATTER = DateTimeFormatter.ofPattern(SOLR_DATE_FORMAT_TEXT);
 
-  public static FileNameSuffixFormatter from(DocumentArchivingProperties properties) {
+  public static FileNameSuffixFormatter from(ArchivingProperties properties) {
     return new FileNameSuffixFormatter(properties.getFileNameSuffixColumn(), properties.getFileNameSuffixDateFormat());
   }
 
@@ -45,10 +45,10 @@ public class FileNameSuffixFormatter {
   public String format(Document document) {
     requireNonNull(document, "Can not format file name suffix: input document is null!");
 
-    if (isBlank(document.get(columnName)))
+    if (isBlank(document.getString(columnName)))
       throw new IllegalArgumentException("The specified document does not have a column " + columnName + " or its value is blank!");
 
-    return format(document.get(columnName));
+    return format(document.getString(columnName));
   }
 
   public String format(String value) {
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsProperties.java
new file mode 100644
index 0000000..da4137f
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsProperties.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import static org.apache.commons.lang.StringUtils.isBlank;
+import static org.apache.commons.lang.StringUtils.isNotBlank;
+
+import org.apache.hadoop.fs.permission.FsPermission;
+
+public class HdfsProperties {
+  private static final String DEFAULT_FILE_PERMISSION = "640";
+
+  private final String hdfsEndpoint;
+  private final String hdfsDestinationDirectory;
+  private final FsPermission hdfsFilePermission;
+  private final String hdfsKerberosPrincipal;
+  private final String hdfsKerberosKeytabPath;
+
+  public HdfsProperties(String hdfsEndpoint, String hdfsDestinationDirectory, FsPermission hdfsFilePermission, String hdfsKerberosPrincipal, String hdfsKerberosKeytabPath) {
+    this.hdfsEndpoint = hdfsEndpoint;
+    this.hdfsDestinationDirectory = hdfsDestinationDirectory;
+    this.hdfsFilePermission = hdfsFilePermission == null ? new FsPermission(DEFAULT_FILE_PERMISSION) : hdfsFilePermission;
+    this.hdfsKerberosPrincipal = hdfsKerberosPrincipal;
+    this.hdfsKerberosKeytabPath = hdfsKerberosKeytabPath;
+  }
+
+  public String getHdfsEndpoint() {
+    return hdfsEndpoint;
+  }
+
+  public String getHdfsDestinationDirectory() {
+    return hdfsDestinationDirectory;
+  }
+
+  public FsPermission getHdfsFilePermission() {
+    return hdfsFilePermission;
+  }
+
+  public String getHdfsKerberosPrincipal() {
+    return hdfsKerberosPrincipal;
+  }
+
+  public String getHdfsKerberosKeytabPath() {
+    return hdfsKerberosKeytabPath;
+  }
+
+  @Override
+  public String toString() {
+    return "HdfsProperties{" +
+            "hdfsEndpoint='" + hdfsEndpoint + '\'' +
+            ", hdfsDestinationDirectory='" + hdfsDestinationDirectory + '\'' +
+            ", hdfsFilePermission=" + hdfsFilePermission +
+            ", hdfsKerberosPrincipal='" + hdfsKerberosPrincipal + '\'' +
+            ", hdfsKerberosKeytabPath='" + hdfsKerberosKeytabPath + '\'' +
+            '}';
+  }
+
+  public void validate() {
+    if (isBlank(hdfsDestinationDirectory))
+      throw new IllegalArgumentException("The property hdfsDestinationDirectory can not be null or empty string!");
+
+    if (isNotBlank(hdfsKerberosPrincipal) && isBlank(hdfsKerberosKeytabPath))
+      throw new IllegalArgumentException("The property hdfsKerberosPrincipal is specified but hdfsKerberosKeytabPath is blank!");
+
+    if (isBlank(hdfsKerberosPrincipal) && isNotBlank(hdfsKerberosKeytabPath))
+      throw new IllegalArgumentException("The property hdfsKerberosKeytabPath is specified but hdfsKerberosPrincipal is blank!");
+  }
+}
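
validate() requires the destination directory and treats the Kerberos
principal/keytab as an all-or-nothing pair; the file permission falls back to
640. A sketch with hypothetical values:

    public class HdfsPropertiesSketch {
      public static HdfsProperties secured() {
        HdfsProperties properties = new HdfsProperties(
                "hdfs://namenode:8020",                 // hypothetical endpoint
                "/archives/solr",                       // hypothetical directory
                null,                                   // -> defaults to 640
                "infra-manager@EXAMPLE.COM",            // hypothetical principal
                "/etc/security/keytabs/infra.keytab");  // hypothetical keytab
        properties.validate();
        return properties;
      }
    }
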
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java
index 0f7b99f..ff48673 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java
@@ -18,33 +18,68 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 import java.io.File;
 import java.io.IOException;
 import java.io.UncheckedIOException;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.core.io.ClassPathResource;
+
 public class HdfsUploader extends AbstractFileAction {
+  private static final Logger LOG = LoggerFactory.getLogger(HdfsUploader.class);
 
   private final Configuration configuration;
-  private final Path destinationDirectory;
+  private final HdfsProperties properties;
 
-  public HdfsUploader(Configuration configuration, Path destinationDirectory) {
-    this.destinationDirectory = destinationDirectory;
+  public HdfsUploader(Configuration configuration, HdfsProperties properties) {
+    this.properties = properties;
     this.configuration = configuration;
+
+    if (new ClassPathResource("core-site.xml").exists()) {
+      LOG.info("Hdfs core-site.xml is found in the classpath.");
+    }
+    else {
+      LOG.warn("Hdfs core-site.xml is not found in the classpath. Using defaults.");
+    }
+    if (new ClassPathResource("hdfs-site.xml").exists()) {
+      LOG.info("Hdfs hdfs-site.xml is found in the classpath.");
+    }
+    else {
+      LOG.warn("Hdfs hdfs-site.xml is not found in the classpath. Using defaults.");
+    }
+    if (isNotBlank(properties.getHdfsEndpoint())) {
+      LOG.info("Hdfs endpoint is defined in Infra Manager properties. Setting fs.defaultFS to {}", properties.getHdfsEndpoint());
+      this.configuration.set("fs.defaultFS", properties.getHdfsEndpoint());
+    }
+
+    UserGroupInformation.setConfiguration(configuration);
   }
 
   @Override
   protected File onPerform(File inputFile) {
+    try {
+      if ("kerberos".equalsIgnoreCase(configuration.get("hadoop.security.authentication")))
+        UserGroupInformation.loginUserFromKeytab(properties.getHdfsKerberosPrincipal(), properties.getHdfsKerberosKeytabPath());
+    } catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
+
     try (FileSystem fileSystem = FileSystem.get(configuration)) {
-      Path destination = new Path(destinationDirectory, inputFile.getName());
+
+      Path destination = new Path(properties.getHdfsDestinationDirectory(), inputFile.getName());
       if (fileSystem.exists(destination)) {
         throw new UnsupportedOperationException(String.format("File '%s' already exists!", destination));
       }
 
       fileSystem.copyFromLocalFile(new Path(inputFile.getAbsolutePath()), destination);
+      fileSystem.setPermission(destination, properties.getHdfsFilePermission());
 
       return inputFile;
     }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
index 531d2d5..1cf5b65 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
@@ -18,15 +18,23 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.UncheckedIOException;
+import java.io.UnsupportedEncodingException;
+
 import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
-import java.io.*;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class LocalDocumentItemWriter implements DocumentItemWriter {
-  private static final Logger LOG = LoggerFactory.getLogger(LocalDocumentItemWriter.class);
+  private static final Logger logger = LogManager.getLogger(LocalDocumentItemWriter.class);
 
   private static final ObjectMapper json = new ObjectMapper();
   private static final String ENCODING = "UTF-8";
@@ -69,7 +77,7 @@ public class LocalDocumentItemWriter implements DocumentItemWriter {
   public void revert() {
     IOUtils.closeQuietly(bufferedWriter);
     if (!outFile.delete())
-      LOG.warn("File {} was not deleted. Exists: {}", outFile.getAbsolutePath(), outFile.exists());
+      logger.warn("File {} was not deleted. Exists: {}", outFile.getAbsolutePath(), outFile.exists());
   }
 
   @Override
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
deleted file mode 100644
index 879b58b..0000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVRecord;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.io.UncheckedIOException;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Optional;
-
-import static org.apache.commons.csv.CSVFormat.DEFAULT;
-
-public class S3AccessCsv implements PasswordStore {
-  private static final Logger LOG = LoggerFactory.getLogger(S3AccessCsv.class);
-
-  public static S3AccessCsv file(String path) {
-    try {
-      return new S3AccessCsv(new FileReader(path));
-    } catch (FileNotFoundException e) {
-      throw new UncheckedIOException(e);
-    }
-  }
-
-  private Map<String, String> passwordMap = new HashMap<>();
-
-  public S3AccessCsv(Reader reader) {
-    try (CSVParser csvParser = CSVParser.parse(reader, DEFAULT.withHeader(
-            S3AccessKeyNames.AccessKeyId.getCsvName(), S3AccessKeyNames.SecretAccessKey.getCsvName()))) {
-      Iterator<CSVRecord> iterator = csvParser.iterator();
-      if (!iterator.hasNext()) {
-        throw new S3AccessCsvFormatException("Csv file is empty!");
-      }
-
-      CSVRecord record = iterator.next();
-      if (record.size() < 2) {
-        throw new S3AccessCsvFormatException("Csv file contains less than 2 columns!");
-      }
-
-      checkColumnExists(record, S3AccessKeyNames.AccessKeyId);
-      checkColumnExists(record, S3AccessKeyNames.SecretAccessKey);
-
-      if (!iterator.hasNext()) {
-        throw new S3AccessCsvFormatException("Csv file contains header only!");
-      }
-
-      record = iterator.next();
-
-      Map<String, Integer> header = csvParser.getHeaderMap();
-      for (S3AccessKeyNames keyNames : S3AccessKeyNames.values())
-        passwordMap.put(keyNames.getEnvVariableName(), record.get(header.get(keyNames.getCsvName())));
-    } catch (IOException e) {
-      throw new UncheckedIOException(e);
-    } catch (S3AccessCsvFormatException e) {
-      LOG.warn("Unable to parse csv file: {}", e.getMessage());
-    }
-  }
-
-  private void checkColumnExists(CSVRecord record, S3AccessKeyNames s3AccessKeyName) {
-    if (!s3AccessKeyName.getCsvName().equals(record.get(s3AccessKeyName.getCsvName()))) {
-      throw new S3AccessCsvFormatException(String.format("Csv file does not contain the required column: '%s'", s3AccessKeyName.getCsvName()));
-    }
-  }
-
-  @Override
-  public Optional<String> getPassword(String propertyName) {
-    return Optional.ofNullable(passwordMap.get(propertyName));
-  }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
deleted file mode 100644
index 59a4469..0000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class S3Properties {
-  private final String s3AccessFile;
-  private final String s3KeyPrefix;
-  private final String s3BucketName;
-  private final String s3EndPoint;
-
-  public S3Properties(String s3AccessFile, String s3KeyPrefix, String s3BucketName, String s3EndPoint) {
-    this.s3AccessFile = s3AccessFile;
-    this.s3KeyPrefix = s3KeyPrefix;
-    this.s3BucketName = s3BucketName;
-    this.s3EndPoint = s3EndPoint;
-  }
-
-  public String getS3KeyPrefix() {
-    return s3KeyPrefix;
-  }
-
-  public String getS3BucketName() {
-    return s3BucketName;
-  }
-
-  public String getS3EndPoint() {
-    return s3EndPoint;
-  }
-
-  public String getS3AccessFile() {
-    return s3AccessFile;
-  }
-
-  @Override
-  public String toString() {
-    return "S3Properties{" +
-            "s3AccessFile='" + s3AccessFile + '\'' +
-            ", s3KeyPrefix='" + s3KeyPrefix + '\'' +
-            ", s3BucketName='" + s3BucketName + '\'' +
-            ", s3EndPoint='" + s3EndPoint + '\'' +
-            '}';
-  }
-
-  public void validate() {
-    if (isBlank(s3BucketName))
-      throw new IllegalArgumentException("The property s3BucketName can not be null or empty string!");
-  }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
deleted file mode 100644
index 2536cb5..0000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
+++ /dev/null
@@ -1,74 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.s3.AmazonS3Client;
-import org.apache.ambari.infra.conf.security.CompositePasswordStore;
-import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-import static org.apache.commons.lang.StringUtils.isNotBlank;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class S3Uploader extends AbstractFileAction {
-
-  private static final Logger LOG = LoggerFactory.getLogger(S3Uploader.class);
-
-  private final AmazonS3Client client;
-  private final String keyPrefix;
-  private final String bucketName;
-
-  public S3Uploader(S3Properties s3Properties, PasswordStore passwordStore) {
-    LOG.info("Initializing S3 client with " + s3Properties);
-
-    this.keyPrefix = s3Properties.getS3KeyPrefix();
-    this.bucketName = s3Properties.getS3BucketName();
-
-    PasswordStore compositePasswordStore = passwordStore;
-    if (isNotBlank((s3Properties.getS3AccessFile())))
-      compositePasswordStore = new CompositePasswordStore(passwordStore, S3AccessCsv.file(s3Properties.getS3AccessFile()));
-
-    BasicAWSCredentials credentials = new BasicAWSCredentials(
-            compositePasswordStore.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName())
-                    .orElseThrow(() -> new IllegalArgumentException("Access key Id is not present!")),
-            compositePasswordStore.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName())
-                    .orElseThrow(() -> new IllegalArgumentException("Secret Access Key is not present!")));
-    client = new AmazonS3Client(credentials);
-    if (!isBlank(s3Properties.getS3EndPoint()))
-      client.setEndpoint(s3Properties.getS3EndPoint());
-//     Note: without pathStyleAccess=true the endpoint is going to be <bucketName>.<host>:<port>
-//    client.setS3ClientOptions(S3ClientOptions.builder().setPathStyleAccess(true).build());
-  }
-
-  @Override
-  public File onPerform(File inputFile) {
-    String key = keyPrefix + inputFile.getName();
-
-    if (client.doesObjectExist(bucketName, key)) {
-      throw new UnsupportedOperationException(String.format("Object '%s' already exists in bucket '%s'", key, bucketName));
-    }
-
-    client.putObject(bucketName, key, inputFile);
-    return inputFile;
-  }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
index fba08e7..ed77a06 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
@@ -18,19 +18,19 @@
  */
 package org.apache.ambari.infra.job.archive;
 
+import java.io.IOException;
+import java.io.UncheckedIOException;
+
 import org.apache.ambari.infra.job.SolrDAOBase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
 
 public class SolrDAO extends SolrDAOBase implements DocumentWiper {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrDAO.class);
+  private static final Logger logger = LogManager.getLogger(SolrDAO.class);
 
   private final SolrProperties queryProperties;
 
@@ -53,7 +53,7 @@ public class SolrDAO extends SolrDAOBase implements DocumentWiper {
             .build();
     query.setRows(rows);
 
-    LOG.info("Executing solr query {}", query.toLocalParamsString());
+    logger.info("Executing solr query {}", query.toLocalParamsString());
 
     try {
       CloudSolrClient client = createClient();
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
index f8d8382..d505934 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
@@ -18,11 +18,6 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.ambari.infra.job.CloseableIterator;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-
 import java.io.IOException;
 import java.io.UncheckedIOException;
 import java.text.DateFormat;
@@ -32,6 +27,11 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.TimeZone;
 
+import org.apache.ambari.infra.job.CloseableIterator;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+
 public class SolrDocumentIterator implements CloseableIterator<Document> {
 
   public static final String SOLR_DATE_FORMAT_TEXT = "yyyy-MM-dd'T'HH:mm:ss.SSSX";
@@ -56,21 +56,24 @@ public class SolrDocumentIterator implements CloseableIterator<Document> {
       return null;
     
     SolrDocument document = documentIterator.next();
-    HashMap<String, String> fieldMap = new HashMap<>();
+    HashMap<String, Object> fieldMap = new HashMap<>();
     for (String key : document.getFieldNames()) {
-      fieldMap.put(key, toString(document.get(key)));
+      fieldMap.put(key, convertFieldValue(document.get(key)));
     }
 
     return new Document(fieldMap);
   }
 
-  private String toString(Object value) {
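+  // Preserves field types where it matters: dates are rendered in Solr's date format, integral numbers stay numeric, everything else is stringified.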
+  private Object convertFieldValue(Object value) {
     if (value == null) {
       return null;
     }
     else if (value instanceof Date) {
       return SOLR_DATE_FORMAT.format(value);
     }
+    else if (value instanceof Integer || value instanceof Long) {
+      return value;
+    }
     else {
       return value.toString();
     }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
index 9770982..87caeb8 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
@@ -18,14 +18,14 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.solr.client.solrj.util.ClientUtils;
-
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.solr.client.solrj.util.ClientUtils;
+
 public class SolrParametrizedString {
   private static final String PARAMETER_PATTERN = "\\$\\{%s[a-z0-9A-Z]+}";
   private static final Pattern NO_PREFIX_PARAMETER_PATTERN = Pattern.compile(String.format(PARAMETER_PATTERN, ""));
@@ -49,16 +49,16 @@ public class SolrParametrizedString {
     return string;
   }
 
-  public SolrParametrizedString set(Map<String, String> parameterMap) {
+  public SolrParametrizedString set(Map<String, Object> parameterMap) {
     return set(NO_PREFIX_PARAMETER_PATTERN, null, parameterMap);
   }
 
-  public SolrParametrizedString set(String prefix, Map<String, String> parameterMap) {
+  public SolrParametrizedString set(String prefix, Map<String, Object> parameterMap) {
     String dottedPrefix = prefix + ".";
     return set(Pattern.compile(String.format(PARAMETER_PATTERN, dottedPrefix)), dottedPrefix, parameterMap);
   }
 
-  private SolrParametrizedString set(Pattern regExPattern, String prefix, Map<String, String> parameterMap) {
+  private SolrParametrizedString set(Pattern regExPattern, String prefix, Map<String, Object> parameterMap) {
     String newString = string;
     for (String paramName : collectParamNames(regExPattern)) {
       String paramSuffix = prefix == null ? paramName : paramName.replace(prefix, "");
@@ -68,10 +68,13 @@ public class SolrParametrizedString {
     return new SolrParametrizedString(newString);
   }
 
-  private String getValue(Map<String, String> parameterMap, String paramSuffix) {
-    String value = parameterMap.get(paramSuffix);
-    if ("*".equals(value))
-      return value;
-    return ClientUtils.escapeQueryChars(value);
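+  // Escapes parameter values for the Solr query syntax; a lone "*" wildcard is passed through unescaped.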
+  private String getValue(Map<String, Object> parameterMap, String paramSuffix) {
+    Object value = parameterMap.get(paramSuffix);
+    if (value == null)
+      throw new NullPointerException(String.format("The value of parameterMap[%s] cannot be null!", paramSuffix));
+    String stringValue = value.toString();
+    if ("*".equals(stringValue))
+      return stringValue;
+    return ClientUtils.escapeQueryChars(stringValue);
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
index a2a78c2..cf26cda 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
@@ -18,12 +18,12 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.springframework.batch.core.JobParameters;
+import static org.apache.commons.lang.StringUtils.isBlank;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.apache.commons.lang.StringUtils.isBlank;
+import org.springframework.batch.core.JobParameters;
 
 public class SolrProperties {
   private String zooKeeperConnectionString;
@@ -88,12 +88,21 @@ public class SolrProperties {
             .addSort(sortColumn);
   }
 
-  public void apply(JobParameters jobParameters) {
-    zooKeeperConnectionString = jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString);
-    collection = jobParameters.getString("collection", collection);
-    queryText = jobParameters.getString("queryText", queryText);
-    filterQueryText = jobParameters.getString("filterQueryText", filterQueryText);
-    deleteQueryText = jobParameters.getString("deleteQueryText", deleteQueryText);
+  public void validate() {
+    if (isBlank(zooKeeperConnectionString))
+      throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!");
+
+    if (isBlank(collection))
+      throw new IllegalArgumentException("The property collection can not be null or empty string!");
+  }
+
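+  // Returns a new instance in which job parameters override the configured defaults; indexed sortColumn[0..n] parameters replace the configured sort columns when present.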
+  public SolrProperties merge(JobParameters jobParameters) {
+    SolrProperties solrProperties = new SolrProperties();
+    solrProperties.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
+    solrProperties.setCollection(jobParameters.getString("collection", collection));
+    solrProperties.setQueryText(jobParameters.getString("queryText", queryText));
+    solrProperties.setFilterQueryText(jobParameters.getString("filterQueryText", filterQueryText));
+    solrProperties.setDeleteQueryText(jobParameters.getString("deleteQueryText", deleteQueryText));
 
     String sortValue;
     List<String> sortColumns = new ArrayList<>();
@@ -102,16 +111,13 @@ public class SolrProperties {
       sortColumns.add(sortValue);
       ++i;
     }
+    if (!sortColumns.isEmpty()) {
+      solrProperties.setSortColumn(sortColumns.toArray(new String[0]));
+    }
+    else {
+      solrProperties.setSortColumn(sortColumn);
+    }
 
-    if (sortColumns.size() > 0)
-      sortColumn = sortColumns.toArray(new String[sortColumns.size()]);
-  }
-
-  public void validate() {
-    if (isBlank(zooKeeperConnectionString))
-      throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!");
-
-    if (isBlank(collection))
-      throw new IllegalArgumentException("The property collection can not be null or empty string!");
+    return solrProperties;
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
index 0e41169..40fa19b 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
@@ -18,20 +18,37 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.solr.client.solrj.SolrQuery;
+import static org.apache.ambari.infra.job.archive.FileNameSuffixFormatter.SOLR_DATETIME_FORMATTER;
+import static org.apache.commons.lang.StringUtils.isBlank;
+import static org.apache.commons.lang.StringUtils.isNotBlank;
+import static org.apache.solr.client.solrj.SolrQuery.ORDER.asc;
 
+import java.time.Duration;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
 import java.util.HashMap;
 import java.util.Map;
 
-import static org.apache.commons.lang.StringUtils.isBlank;
-import static org.apache.solr.client.solrj.SolrQuery.ORDER.asc;
+import org.apache.solr.client.solrj.SolrQuery;
 
 public class SolrQueryBuilder {
 
+  public static String computeEnd(String end, Duration ttl) {
+    return computeEnd(end, OffsetDateTime.now(ZoneOffset.UTC), ttl);
+  }
+
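+  // An explicit 'end' takes precedence; otherwise the end of the interval is computed as now minus ttl. Returns null when neither is set.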
+  public static String computeEnd(String end, OffsetDateTime now, Duration ttl) {
+    if (isNotBlank(end))
+      return end;
+    if (ttl != null)
+      return SOLR_DATETIME_FORMATTER.format(now.minus(ttl));
+    return null;
+  }
+
   private static final String INTERVAL_START = "start";
   private static final String INTERVAL_END = "end";
   private String queryText;
-  private final Map<String, String> interval;
+  private final Map<String, Object> interval;
   private String filterQueryText;
   private Document document;
   private String[] sortFields;
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java
new file mode 100644
index 0000000..d6faa70
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.cleanup;
+
+import static org.apache.ambari.infra.job.JobsPropertyMap.PARAMETERS_CONTEXT_KEY;
+
+import javax.inject.Inject;
+
+import org.apache.ambari.infra.job.InfraJobExecutionDao;
+import org.apache.ambari.infra.job.JobPropertiesHolder;
+import org.apache.ambari.infra.job.JobScheduler;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepScope;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.context.event.ApplicationReadyEvent;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.event.EventListener;
+
+@Configuration
+public class CleanUpConfiguration {
+
+  public static final String JOB_NAME = "clean_up";
+  private final StepBuilderFactory steps;
+  private final JobBuilderFactory jobs;
+  private final JobScheduler scheduler;
+  private final CleanUpProperties cleanUpProperties;
+
+  @Inject
+  public CleanUpConfiguration(StepBuilderFactory steps, JobBuilderFactory jobs, CleanUpProperties cleanUpProperties, JobScheduler scheduler) {
+    this.steps = steps;
+    this.jobs = jobs;
+    this.scheduler = scheduler;
+    this.cleanUpProperties = cleanUpProperties;
+  }
+
+  @EventListener(ApplicationReadyEvent.class)
+  public void scheduleJob() {
+    cleanUpProperties.scheduling().ifPresent(schedulingProperties -> scheduler.schedule(JOB_NAME, schedulingProperties));
+  }
+
+  @Bean(name = "cleanUpJob")
+  public Job job(@Qualifier("cleanUpStep") Step cleanUpStep) {
+    return jobs.get(JOB_NAME).listener(new JobPropertiesHolder<>(cleanUpProperties)).start(cleanUpStep).build();
+  }
+
+  @Bean(name = "cleanUpStep")
+  protected Step cleanUpStep(TaskHistoryWiper taskHistoryWiper) {
+    return steps.get("cleanUpStep").tasklet(taskHistoryWiper).build();
+  }
+
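+  // Step-scoped: the merged CleanUpProperties are read back from the job execution context (see the JobPropertiesHolder listener registered on the job above).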
+  @Bean
+  @StepScope
+  protected TaskHistoryWiper taskHistoryWiper(
+          InfraJobExecutionDao infraJobExecutionDao,
+          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") CleanUpProperties cleanUpProperties) {
+    return new TaskHistoryWiper(infraJobExecutionDao, cleanUpProperties.getTtl());
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java
new file mode 100644
index 0000000..b29dcc1
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.cleanup;
+
+import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
+
+import java.time.Duration;
+
+import org.apache.ambari.infra.job.JobProperties;
+import org.apache.ambari.infra.job.Validatable;
+import org.apache.ambari.infra.json.DurationToStringConverter;
+import org.apache.ambari.infra.json.StringToDurationConverter;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+
+@Configuration
+@ConfigurationProperties(prefix = "infra-manager.jobs.clean-up")
+public class CleanUpProperties extends JobProperties<CleanUpProperties> implements Validatable {
+
+  @JsonSerialize(converter = DurationToStringConverter.class)
+  @JsonDeserialize(converter = StringToDurationConverter.class)
+  private Duration ttl;
+
+  protected CleanUpProperties() {
+    setEnabled(true);
+  }
+
+  public Duration getTtl() {
+    return ttl;
+  }
+
+  public void setTtl(Duration ttl) {
+    this.ttl = ttl;
+  }
+
+  @Override
+  public void validate() {
+
+  }
+
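+  // A 'ttl' job parameter (ISO-8601 duration, e.g. PT1H) overrides the configured value.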
+  @Override
+  public CleanUpProperties merge(JobParameters jobParameters) {
+    CleanUpProperties cleanUpProperties = new CleanUpProperties();
+    cleanUpProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return cleanUpProperties;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java
new file mode 100644
index 0000000..2627f4c
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.cleanup;
+
+import java.time.Duration;
+import java.time.OffsetDateTime;
+
+import org.apache.ambari.infra.job.InfraJobExecutionDao;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.springframework.batch.core.StepContribution;
+import org.springframework.batch.core.scope.context.ChunkContext;
+import org.springframework.batch.core.step.tasklet.Tasklet;
+import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.lang.NonNull;
+
+public class TaskHistoryWiper implements Tasklet {
+
+  private static final Logger logger = LogManager.getLogger(TaskHistoryWiper.class);
+  private static final Duration MINIMUM_TTL = Duration.ofHours(1);
+
+  private final InfraJobExecutionDao infraJobExecutionDao;
+  private final Duration ttl;
+
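+  // Clamps the configured ttl to a one-hour minimum; a null or smaller value falls back to MINIMUM_TTL.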
+  public TaskHistoryWiper(InfraJobExecutionDao infraJobExecutionDao, Duration ttl) {
+    this.infraJobExecutionDao = infraJobExecutionDao;
+    if (ttl == null || ttl.compareTo(MINIMUM_TTL) < 0) {
+      logger.info("The ttl value ({}) less than the minimum required. Using the minimum ({}) instead", ttl, MINIMUM_TTL);
+      this.ttl = MINIMUM_TTL;
+    }
+    else {
+      this.ttl = ttl;
+    }
+  }
+
+  @Override
+  public RepeatStatus execute(@NonNull StepContribution contribution, @NonNull ChunkContext chunkContext) {
+    infraJobExecutionDao.deleteJobExecutions(OffsetDateTime.now().minus(ttl));
+    return RepeatStatus.FINISHED;
+  }
+}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
similarity index 54%
rename from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java
rename to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
index 63b7dd2..be4e651 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
@@ -18,19 +18,29 @@
  */
 package org.apache.ambari.infra.job.deleting;
 
+import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
+import static org.apache.commons.lang.StringUtils.isBlank;
+
+import java.time.Duration;
+
 import org.apache.ambari.infra.job.JobProperties;
+import org.apache.ambari.infra.job.Validatable;
+import org.apache.ambari.infra.json.DurationToStringConverter;
+import org.apache.ambari.infra.json.StringToDurationConverter;
 import org.springframework.batch.core.JobParameters;
 
-import static org.apache.commons.lang.StringUtils.isBlank;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-public class DocumentDeletingProperties extends JobProperties<DocumentDeletingProperties> {
+public class DeletingProperties extends JobProperties<DeletingProperties> implements Validatable {
   private String zooKeeperConnectionString;
   private String collection;
   private String filterField;
-
-  public DocumentDeletingProperties() {
-    super(DocumentDeletingProperties.class);
-  }
+  private String start;
+  private String end;
+  @JsonSerialize(converter = DurationToStringConverter.class)
+  @JsonDeserialize(converter = StringToDurationConverter.class)
+  private Duration ttl;
 
   public String getZooKeeperConnectionString() {
     return zooKeeperConnectionString;
@@ -56,11 +66,28 @@ public class DocumentDeletingProperties extends JobProperties<DocumentDeletingPr
     this.filterField = filterField;
   }
 
-  @Override
-  public void apply(JobParameters jobParameters) {
-    zooKeeperConnectionString = jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString);
-    collection = jobParameters.getString("collection", collection);
-    filterField = jobParameters.getString("filterField", filterField);
+  public String getStart() {
+    return start;
+  }
+
+  public void setStart(String start) {
+    this.start = start;
+  }
+
+  public String getEnd() {
+    return end;
+  }
+
+  public void setEnd(String end) {
+    this.end = end;
+  }
+
+  public Duration getTtl() {
+    return ttl;
+  }
+
+  public void setTtl(Duration ttl) {
+    this.ttl = ttl;
   }
 
   @Override
@@ -74,4 +101,16 @@ public class DocumentDeletingProperties extends JobProperties<DocumentDeletingPr
     if (isBlank(filterField))
       throw new IllegalArgumentException("The property filterField can not be null or empty string!");
   }
+
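+  // Job parameters override the configured defaults; 'start' and 'end' default to the "*" wildcard and 'ttl' is parsed as an ISO-8601 duration.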
+  @Override
+  public DeletingProperties merge(JobParameters jobParameters) {
+    DeletingProperties deletingProperties = new DeletingProperties();
+    deletingProperties.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
+    deletingProperties.setCollection(jobParameters.getString("collection", collection));
+    deletingProperties.setFilterField(jobParameters.getString("filterField", filterField));
+    deletingProperties.setStart(jobParameters.getString("start", "*"));
+    deletingProperties.setEnd(jobParameters.getString("end", "*"));
+    deletingProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return deletingProperties;
+  }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
index 4a68c49..1d6fb9a 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
@@ -18,6 +18,10 @@
  */
 package org.apache.ambari.infra.job.deleting;
 
+import static org.apache.ambari.infra.job.JobsPropertyMap.PARAMETERS_CONTEXT_KEY;
+
+import javax.inject.Inject;
+
 import org.apache.ambari.infra.job.AbstractJobsConfiguration;
 import org.apache.ambari.infra.job.JobScheduler;
 import org.springframework.batch.core.Job;
@@ -33,10 +37,8 @@ import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-import javax.inject.Inject;
-
 @Configuration
-public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<DocumentDeletingProperties> {
+public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<DeletingProperties, DeletingProperties> {
 
   private final StepBuilderFactory steps;
   private final Step deleteStep;
@@ -70,9 +72,7 @@ public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<Doc
   @Bean
   @StepScope
   public DocumentWiperTasklet documentWiperTasklet(
-          @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentDeletingProperties properties,
-          @Value("#{jobParameters[start]}") String start,
-          @Value("#{jobParameters[end]}") String end) {
-    return new DocumentWiperTasklet(properties, start, end);
+          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") DeletingProperties parameters) {
+    return new DocumentWiperTasklet(parameters);
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
index 1dc0caf..92d0428 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
@@ -18,21 +18,21 @@
  */
 package org.apache.ambari.infra.job.deleting;
 
+import java.util.Map;
+
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.Map;
-
 @Configuration
 @ConfigurationProperties(prefix = "infra-manager.jobs")
 public class DocumentDeletingPropertyMap {
-  private Map<String, DocumentDeletingProperties> solrDataDeleting;
+  private Map<String, DeletingProperties> solrDataDeleting;
 
-  public Map<String, DocumentDeletingProperties> getSolrDataDeleting() {
+  public Map<String, DeletingProperties> getSolrDataDeleting() {
     return solrDataDeleting;
   }
 
-  public void setSolrDataDeleting(Map<String, DocumentDeletingProperties> solrDataDeleting) {
+  public void setSolrDataDeleting(Map<String, DeletingProperties> solrDataDeleting) {
     this.solrDataDeleting = solrDataDeleting;
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
index 463e6e0..9bde32a 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
@@ -18,28 +18,30 @@
  */
 package org.apache.ambari.infra.job.deleting;
 
+import static org.apache.ambari.infra.job.archive.SolrQueryBuilder.computeEnd;
+
 import org.apache.ambari.infra.job.SolrDAOBase;
 import org.apache.solr.client.solrj.util.ClientUtils;
 import org.springframework.batch.core.StepContribution;
 import org.springframework.batch.core.scope.context.ChunkContext;
 import org.springframework.batch.core.step.tasklet.Tasklet;
 import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.lang.NonNull;
 
 public class DocumentWiperTasklet extends SolrDAOBase implements Tasklet {
-  private final String filterField;
-  private final String start;
-  private final String end;
+  private final DeletingProperties parameters;
 
-  public DocumentWiperTasklet(DocumentDeletingProperties properties, String start, String end) {
-    super(properties.getZooKeeperConnectionString(), properties.getCollection());
-    this.filterField = properties.getFilterField();
-    this.start = start;
-    this.end = end;
+  public DocumentWiperTasklet(DeletingProperties deletingProperties) {
+    super(deletingProperties.getZooKeeperConnectionString(), deletingProperties.getCollection());
+    parameters = deletingProperties;
   }
 
   @Override
-  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) {
-    delete(String.format("%s:[%s TO %s]", filterField, getValue(start), getValue(end)));
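+  // Deletes every document whose filter field falls in [start, end]; a blank 'end' is derived from the configured ttl via SolrQueryBuilder.computeEnd.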
+  public RepeatStatus execute(@NonNull StepContribution contribution, @NonNull ChunkContext chunkContext) {
+    delete(String.format("%s:[%s TO %s]",
+            parameters.getFilterField(),
+            getValue(parameters.getStart()),
+            getValue(computeEnd(parameters.getEnd(), parameters.getTtl()))));
     return RepeatStatus.FINISHED;
   }
 
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
index a124e4d..ff5ad6d 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
@@ -18,17 +18,17 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.item.ItemProcessor;
 
 public class DummyItemProcessor implements ItemProcessor<DummyObject, String> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyItemProcessor.class);
+  private static final Logger logger = LogManager.getLogger(DummyItemProcessor.class);
 
   @Override
   public String process(DummyObject input) throws Exception {
-    LOG.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2());
+    logger.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2());
     Thread.sleep(10000);
     return String.format("%s, %s", input.getF1(), input.getF2());
   }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
index 89ad013..3adb91a 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
@@ -18,14 +18,6 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.apache.ambari.infra.conf.InfraManagerDataConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.annotation.BeforeStep;
-import org.springframework.batch.item.ItemWriter;
-
-import javax.inject.Inject;
 import java.io.File;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -33,9 +25,18 @@ import java.nio.file.Paths;
 import java.util.Date;
 import java.util.List;
 
+import javax.inject.Inject;
+
+import org.apache.ambari.infra.conf.InfraManagerDataConfig;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.annotation.BeforeStep;
+import org.springframework.batch.item.ItemWriter;
+
 public class DummyItemWriter implements ItemWriter<String> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyItemWriter.class);
+  private static final Logger logger = LogManager.getLogger(DummyItemWriter.class);
 
   private StepExecution stepExecution;
 
@@ -44,17 +45,17 @@ public class DummyItemWriter implements ItemWriter<String> {
 
   @Override
   public void write(List<? extends String> values) throws Exception {
-    LOG.info("DummyItem writer called (values: {})... wait 1 seconds", values.toString());
+    logger.info("DummyItem writer called (values: {})... wait 1 seconds", values.toString());
     Thread.sleep(1000);
     String outputDirectoryLocation = String.format("%s%s%s%s", infraManagerDataConfig.getDataFolder(), File.separator, "dummyOutput-", new Date().getTime());
     Path pathToDirectory = Paths.get(outputDirectoryLocation);
     Path pathToFile = Paths.get(String.format("%s%s%s", outputDirectoryLocation, File.separator, "dummyOutput.txt"));
     Files.createDirectories(pathToDirectory);
-    LOG.info("Write location to step execution context...");
+    logger.info("Write location to step execution context...");
     stepExecution.getExecutionContext().put("stepOutputLocation", pathToFile.toAbsolutePath().toString());
-    LOG.info("Write location to job execution context...");
+    logger.info("Write location to job execution context...");
     stepExecution.getJobExecution().getExecutionContext().put("jobOutputLocation", pathToFile.toAbsolutePath().toString());
-    LOG.info("Write to file: {}", pathToFile.toAbsolutePath());
+    logger.info("Write to file: {}", pathToFile.toAbsolutePath());
     Files.write(pathToFile, values.toString().getBytes());
   }
 
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
index 99c50e8..4602b9b 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
@@ -18,28 +18,28 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobExecutionListener;
 
 public class DummyJobListener implements JobExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyJobListener.class);
+  private static final Logger logger = LogManager.getLogger(DummyJobListener.class);
 
   @Override
   public void beforeJob(JobExecution jobExecution) {
-    LOG.info("Dummy - before job execution");
+    logger.info("Dummy - before job execution");
   }
 
   @Override
   public void afterJob(JobExecution jobExecution) {
-    LOG.info("Dummy - after job execution");
+    logger.info("Dummy - after job execution");
     if (jobExecution.getExecutionContext().get("jobOutputLocation") != null) {
       String jobOutputLocation = (String) jobExecution.getExecutionContext().get("jobOutputLocation");
       String exitDescription = "file://" + jobOutputLocation;
-      LOG.info("Add exit description '{}'", exitDescription);
+      logger.info("Add exit description '{}'", exitDescription);
       jobExecution.setExitStatus(new ExitStatus(ExitStatus.COMPLETED.getExitCode(), exitDescription));
     }
   }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
index 548e650..838d846 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
@@ -18,24 +18,24 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.StepExecution;
 import org.springframework.batch.core.StepExecutionListener;
 
 public class DummyStepListener implements StepExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyStepListener.class);
+  private static final Logger logger = LogManager.getLogger(DummyStepListener.class);
 
   @Override
   public void beforeStep(StepExecution stepExecution) {
-    LOG.info("Dummy step - before step execution");
+    logger.info("Dummy step - before step execution");
   }
 
   @Override
   public ExitStatus afterStep(StepExecution stepExecution) {
-    LOG.info("Dummy step - after step execution");
+    logger.info("Dummy step - after step execution");
     return stepExecution.getExitStatus();
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/DurationToStringConverter.java
similarity index 68%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/DurationToStringConverter.java
index 8e3387b..0946dff 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/DurationToStringConverter.java
@@ -16,13 +16,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.infra.conf.security;
+package org.apache.ambari.infra.json;
 
-import java.util.Optional;
+import java.time.Duration;
 
-public class SecurityEnvironment implements PasswordStore {
+import com.fasterxml.jackson.databind.util.StdConverter;
+
+public class DurationToStringConverter extends StdConverter<Duration, String> {
   @Override
-  public Optional<String> getPassword(String propertyName) {
-    return Optional.ofNullable(System.getenv(propertyName));
+  public String convert(Duration value) {
+    return toString(value);
+  }
+
+  public static String toString(Duration value) {
+    return value == null ? null : value.toString();
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/FsPermissionToStringConverter.java
similarity index 65%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/FsPermissionToStringConverter.java
index 8e3387b..f8ab4c0 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/FsPermissionToStringConverter.java
@@ -16,13 +16,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.infra.conf.security;
+package org.apache.ambari.infra.json;
 
-import java.util.Optional;
+import org.apache.hadoop.fs.permission.FsPermission;
 
-public class SecurityEnvironment implements PasswordStore {
+import com.fasterxml.jackson.databind.util.StdConverter;
+
+public class FsPermissionToStringConverter extends StdConverter<FsPermission, String> {
   @Override
-  public Optional<String> getPassword(String propertyName) {
-    return Optional.ofNullable(System.getenv(propertyName));
+  public String convert(FsPermission value) {
+    return toString(value);
+  }
+
+  public static String toString(FsPermission value) {
+    return value == null ? null : Short.toString(value.toOctal());
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/OffsetDateTimeToStringConverter.java
similarity index 68%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/OffsetDateTimeToStringConverter.java
index 8e3387b..25bdf19 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/OffsetDateTimeToStringConverter.java
@@ -16,13 +16,17 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.infra.conf.security;
+package org.apache.ambari.infra.json;
 
-import java.util.Optional;
+import java.time.OffsetDateTime;
+import java.time.format.DateTimeFormatter;
+
+import com.fasterxml.jackson.databind.util.StdConverter;
+
+public class OffsetDateTimeToStringConverter extends StdConverter<OffsetDateTime, String> {
 
-public class SecurityEnvironment implements PasswordStore {
   @Override
-  public Optional<String> getPassword(String propertyName) {
-    return Optional.ofNullable(System.getenv(propertyName));
+  public String convert(OffsetDateTime value) {
+    return DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(value);
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/StringToDurationConverter.java
similarity index 58%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/StringToDurationConverter.java
index 1dc0caf..4746048 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/StringToDurationConverter.java
@@ -16,23 +16,27 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.infra.job.deleting;
+package org.apache.ambari.infra.json;
 
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.context.annotation.Configuration;
+import java.time.Duration;
 
-import java.util.Map;
+import javax.inject.Named;
 
-@Configuration
-@ConfigurationProperties(prefix = "infra-manager.jobs")
-public class DocumentDeletingPropertyMap {
-  private Map<String, DocumentDeletingProperties> solrDataDeleting;
+import org.springframework.boot.context.properties.ConfigurationPropertiesBinding;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.lang.NonNull;
 
-  public Map<String, DocumentDeletingProperties> getSolrDataDeleting() {
-    return solrDataDeleting;
+import com.fasterxml.jackson.databind.util.StdConverter;
+
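+// Converts ISO-8601 duration strings (e.g. "PT10M", "P1D") to java.time.Duration for both Jackson deserialization and Spring configuration property binding.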
+@Named
+@ConfigurationPropertiesBinding
+public class StringToDurationConverter extends StdConverter<String, Duration> implements Converter<String, Duration> {
+  @Override
+  public Duration convert(@NonNull String value) {
+    return toDuration(value);
   }
 
-  public void setSolrDataDeleting(Map<String, DocumentDeletingProperties> solrDataDeleting) {
-    this.solrDataDeleting = solrDataDeleting;
+  public static Duration toDuration(String value) {
+    return value == null ? null : Duration.parse(value);
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/StringToFsPermissionConverter.java
similarity index 54%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/StringToFsPermissionConverter.java
index 1dc0caf..e271346 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/json/StringToFsPermissionConverter.java
@@ -16,23 +16,28 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.infra.job.deleting;
+package org.apache.ambari.infra.json;
 
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.context.annotation.Configuration;
+import static org.apache.commons.lang.StringUtils.isBlank;
 
-import java.util.Map;
+import javax.inject.Named;
 
-@Configuration
-@ConfigurationProperties(prefix = "infra-manager.jobs")
-public class DocumentDeletingPropertyMap {
-  private Map<String, DocumentDeletingProperties> solrDataDeleting;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.springframework.boot.context.properties.ConfigurationPropertiesBinding;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.lang.NonNull;
 
-  public Map<String, DocumentDeletingProperties> getSolrDataDeleting() {
-    return solrDataDeleting;
+import com.fasterxml.jackson.databind.util.StdConverter;
+
+@Named
+@ConfigurationPropertiesBinding
+public class StringToFsPermissionConverter extends StdConverter<String, FsPermission> implements Converter<String, FsPermission> {
+  @Override
+  public FsPermission convert(@NonNull String value) {
+    return toFsPermission(value);
   }
 
-  public void setSolrDataDeleting(Map<String, DocumentDeletingProperties> solrDataDeleting) {
-    this.solrDataDeleting = solrDataDeleting;
+  public static FsPermission toFsPermission(String value) {
+    return isBlank(value) ? null : new FsPermission(value);
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
index f35387d..7886452 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
@@ -18,9 +18,21 @@
  */
 package org.apache.ambari.infra.manager;
 
-import com.google.common.collect.Lists;
+import static java.util.Collections.unmodifiableList;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
 import org.apache.ambari.infra.model.ExecutionContextResponse;
-import org.apache.ambari.infra.model.JobDetailsResponse;
 import org.apache.ambari.infra.model.JobExecutionDetailsResponse;
 import org.apache.ambari.infra.model.JobExecutionInfoResponse;
 import org.apache.ambari.infra.model.JobInstanceDetailsResponse;
@@ -28,8 +40,8 @@ import org.apache.ambari.infra.model.JobOperationParams;
 import org.apache.ambari.infra.model.StepExecutionContextResponse;
 import org.apache.ambari.infra.model.StepExecutionInfoResponse;
 import org.apache.ambari.infra.model.StepExecutionProgressResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.history.StepExecutionHistory;
 import org.springframework.batch.admin.service.JobService;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
@@ -50,22 +62,12 @@ import org.springframework.batch.core.repository.JobExecutionAlreadyRunningExcep
 import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
 import org.springframework.batch.core.repository.JobRestartException;
 
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.TimeZone;
+import com.google.common.collect.Lists;
 
 @Named
 public class JobManager implements Jobs {
 
-  private static final Logger LOG = LoggerFactory.getLogger(JobManager.class);
+  private static final Logger logger = LogManager.getLogger(JobManager.class);
 
   @Inject
   private JobService jobService;
@@ -76,8 +78,6 @@ public class JobManager implements Jobs {
   @Inject
   private JobExplorer jobExplorer;
 
-  private TimeZone timeZone = TimeZone.getDefault();
-
   public Set<String> getAllJobNames() {
     return jobOperator.getJobNames();
   }
@@ -95,7 +95,7 @@ public class JobManager implements Jobs {
     if (!running.isEmpty())
       throw new JobExecutionAlreadyRunningException("An instance of this job is already active: "+jobName);
 
-    return new JobExecutionInfoResponse(jobService.launch(jobName, jobParameters), timeZone);
+    return new JobExecutionInfoResponse(jobService.launch(jobName, jobParameters));
   }
 
   @Override
@@ -110,6 +110,16 @@ public class JobManager implements Jobs {
     return jobService.listJobExecutionsForJob(jobName, 0, 1).stream().findFirst();
   }
 
+  @Override
+  public void stopAndAbandon(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException {
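+    // Try a graceful stop first; JobExecutionNotRunningException only means there is nothing left to stop, so fall through and abandon the execution anyway.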
+    try {
+      jobService.stop(jobExecutionId);
+    } catch (JobExecutionNotRunningException e) {
+      logger.warn("Job is not running jobExecutionId={}: {}", jobExecutionId, e.getMessage());
+    }
+    jobService.abandon(jobExecutionId);
+  }
+
   /**
    * Get all executions ids that mapped to specific job name,
    */
@@ -131,10 +141,10 @@ public class JobManager implements Jobs {
     JobExecution jobExecution = jobService.getJobExecution(jobExecutionId);
     List<StepExecutionInfoResponse> stepExecutionInfoList = new ArrayList<>();
     for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
-      stepExecutionInfoList.add(new StepExecutionInfoResponse(stepExecution, timeZone));
+      stepExecutionInfoList.add(new StepExecutionInfoResponse(stepExecution));
     }
-    stepExecutionInfoList.sort(Comparator.comparing(StepExecutionInfoResponse::getId));
-    return new JobExecutionDetailsResponse(new JobExecutionInfoResponse(jobExecution, timeZone), stepExecutionInfoList);
+    stepExecutionInfoList.sort(Comparator.comparing(StepExecutionInfoResponse::getStepExecutionId));
+    return new JobExecutionDetailsResponse(new JobExecutionInfoResponse(jobExecution), stepExecutionInfoList);
   }
 
   /**
@@ -150,8 +160,8 @@ public class JobManager implements Jobs {
     } else {
       throw new UnsupportedOperationException("Unsupported operaration");
     }
-    LOG.info("Job {} was marked {}", jobExecution.getJobInstance().getJobName(), operation.name());
-    return new JobExecutionInfoResponse(jobExecution, timeZone);
+    logger.info("Job {} was marked {}", jobExecution.getJobInstance().getJobName(), operation.name());
+    return new JobExecutionInfoResponse(jobExecution);
   }
 
   /**
@@ -176,7 +186,7 @@ public class JobManager implements Jobs {
       Collection<JobExecution> jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstanceId);
       JobExecution jobExecution = jobExecutions.iterator().next();
       Long jobExecutionId = jobExecution.getId();
-      return new JobExecutionInfoResponse(jobService.restart(jobExecutionId), timeZone);
+      return new JobExecutionInfoResponse(jobService.restart(jobExecutionId));
     } else {
       throw new UnsupportedOperationException("Unsupported operation (try: RESTART)");
     }
@@ -211,7 +221,7 @@ public class JobManager implements Jobs {
     JobInstance jobInstance = jobService.getJobInstance(jobInstanceId);
     Collection<JobExecution> jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstance.getInstanceId());
     for (JobExecution jobExecution : jobExecutions) {
-      result.add(new JobExecutionInfoResponse(jobExecution, timeZone));
+      result.add(new JobExecutionInfoResponse(jobExecution));
     }
     return result;
   }
@@ -219,25 +229,26 @@ public class JobManager implements Jobs {
   /**
    * Get job details for a specific job. (paged)
    */
-  public JobDetailsResponse getJobDetails(String jobName, int page, int size) throws NoSuchJobException {
+  public List<JobInstanceDetailsResponse> getJobDetails(String jobName, int page, int size) throws NoSuchJobException {
     List<JobInstanceDetailsResponse> jobInstanceResponses = Lists.newArrayList();
     Collection<JobInstance> jobInstances = jobService.listJobInstances(jobName, page, size);
 
-    int count = jobService.countJobExecutionsForJob(jobName);
     boolean launchable = jobService.isLaunchable(jobName);
-    boolean isIncrementable = jobService.isIncrementable(jobName);
+    boolean incrementable = jobService.isIncrementable(jobName);
 
     for (JobInstance jobInstance: jobInstances) {
-      List<JobExecutionInfoResponse> executionInfos = Lists.newArrayList();
+      List<JobExecutionInfoResponse> executionInfoResponses = Lists.newArrayList();
       Collection<JobExecution> jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstance.getId());
       if (jobExecutions != null) {
         for (JobExecution jobExecution : jobExecutions) {
-          executionInfos.add(new JobExecutionInfoResponse(jobExecution, timeZone));
+          executionInfoResponses.add(new JobExecutionInfoResponse(jobExecution));
         }
       }
-      jobInstanceResponses.add(new JobInstanceDetailsResponse(jobInstance, executionInfos));
+      jobInstanceResponses.add(new JobInstanceDetailsResponse(
+              new JobInfo(jobName, executionInfoResponses.size(), jobInstance.getInstanceId(), launchable, incrementable),
+              executionInfoResponses));
     }
-    return new JobDetailsResponse(new JobInfo(jobName, count, launchable, isIncrementable), jobInstanceResponses);
+    return unmodifiableList(jobInstanceResponses);
   }
 
   /**
@@ -245,7 +256,7 @@ public class JobManager implements Jobs {
    */
   public StepExecutionInfoResponse getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
     StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId);
-    return new StepExecutionInfoResponse(stepExecution, timeZone);
+    return new StepExecutionInfoResponse(stepExecution);
   }
 
   /**
@@ -265,7 +276,7 @@ public class JobManager implements Jobs {
    */
   public StepExecutionProgressResponse getStepExecutionProgress(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
     StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId);
-    StepExecutionInfoResponse stepExecutionInfoResponse = new StepExecutionInfoResponse(stepExecution, timeZone);
+    StepExecutionInfoResponse stepExecutionInfoResponse = new StepExecutionInfoResponse(stepExecution);
     String stepName = stepExecution.getStepName();
     if (stepName.contains(":partition")) {
       stepName = stepName.replaceAll("(:partition).*", "$1*");
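
A minimal caller sketch for the new stopAndAbandon contract (assumes a Spring-injected Jobs bean named jobs; illustration only, not part of this patch):

    import javax.inject.Inject;
    import org.apache.ambari.infra.manager.Jobs;
    import org.springframework.batch.core.launch.NoSuchJobExecutionException;
    import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;

    // Hypothetical caller: stopAndAbandon() swallows JobExecutionNotRunningException
    // internally, so it is safe to call on an execution that has already finished.
    class JobCleaner {
      @Inject
      private Jobs jobs;

      void cleanUp(Long jobExecutionId)
          throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException {
        jobs.stopAndAbandon(jobExecutionId); // stop if still running, then mark ABANDONED
      }
    }
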
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java
index b2ca605..b5a3b5a 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java
@@ -18,6 +18,8 @@
  */
 package org.apache.ambari.infra.manager;
 
+import java.util.Optional;
+
 import org.apache.ambari.infra.model.JobExecutionInfoResponse;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobParameters;
@@ -28,8 +30,6 @@ import org.springframework.batch.core.repository.JobExecutionAlreadyRunningExcep
 import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
 import org.springframework.batch.core.repository.JobRestartException;
 
-import java.util.Optional;
-
 public interface Jobs {
   JobExecutionInfoResponse launchJob(String jobName, JobParameters params)
           throws JobParametersInvalidException, NoSuchJobException,
@@ -39,4 +39,6 @@ public interface Jobs {
           JobParametersInvalidException, JobRestartException, NoSuchJobExecutionException;
 
   Optional<JobExecution> lastRun(String jobName) throws NoSuchJobException, NoSuchJobExecutionException;
+
+  void stopAndAbandon(Long jobExecution) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException;
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/DateUtil.java
similarity index 72%
rename from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
rename to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/DateUtil.java
index 8e3387b..810a95b 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/DateUtil.java
@@ -16,13 +16,16 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.infra.conf.security;
+package org.apache.ambari.infra.model;
 
-import java.util.Optional;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.util.Date;
 
-public class SecurityEnvironment implements PasswordStore {
-  @Override
-  public Optional<String> getPassword(String propertyName) {
-    return Optional.ofNullable(System.getenv(propertyName));
+public class DateUtil {
+  public static OffsetDateTime toOffsetDateTime(Date date) {
+    if (date == null)
+      return null;
+    return date.toInstant().atOffset(ZoneOffset.UTC);
   }
 }
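
A quick usage sketch for the new DateUtil helper (hypothetical values, illustration only): legacy java.util.Date timestamps coming out of Spring Batch are normalized to UTC, and null is passed through:

    import java.time.OffsetDateTime;
    import java.util.Date;
    import org.apache.ambari.infra.model.DateUtil;

    class DateUtilDemo {
      public static void main(String[] args) {
        Date legacy = new Date(0L); // the epoch
        OffsetDateTime utc = DateUtil.toOffsetDateTime(legacy);
        System.out.println(utc);                            // 1970-01-01T00:00Z
        System.out.println(DateUtil.toOffsetDateTime(null)); // null-safe: prints null
      }
    }
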
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java
index 2d46c54..0eb1a16 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java
@@ -18,6 +18,8 @@
  */
 package org.apache.ambari.infra.model;
 
+import static java.util.Collections.unmodifiableMap;
+
 import java.util.Map;
 
 public class ExecutionContextResponse {
@@ -27,7 +29,7 @@ public class ExecutionContextResponse {
 
   public ExecutionContextResponse(Long jobExecutionId, Map<String, Object> executionContextMap) {
     this.jobExecutionId = jobExecutionId;
-    this.executionContextMap = executionContextMap;
+    this.executionContextMap = unmodifiableMap(executionContextMap);
   }
 
   public Long getJobExecutionId() {
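
Note that Collections.unmodifiableMap (used here and in the other response models below) returns a read-only view over the caller's map, not a defensive copy; a small sketch of the distinction (hypothetical, illustration only):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class UnmodifiableViewDemo {
      public static void main(String[] args) {
        Map<String, Object> source = new HashMap<>();
        Map<String, Object> view = Collections.unmodifiableMap(source);
        source.put("jobExecutionId", 42L); // later writes to the source show through
        System.out.println(view.get("jobExecutionId")); // 42
        // view.put("x", 1); // would throw UnsupportedOperationException
      }
    }
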
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ISO8601DateFormatter.java
similarity index 52%
copy from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
copy to ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ISO8601DateFormatter.java
index b176f12..5f945b6 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ISO8601DateFormatter.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,33 +18,24 @@
  */
 package org.apache.ambari.infra.model;
 
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-
-public class JobExecutionStopRequest {
-
-  @PathParam("jobExecutionId")
-  @NotNull
-  private Long jobExecutionId;
-
-  @QueryParam("operation")
-  @NotNull
-  private JobOperationParams.JobStopOrAbandonOperationParam operation;
-
-  public Long getJobExecutionId() {
-    return jobExecutionId;
-  }
-
-  public void setJobExecutionId(Long jobExecutionId) {
-    this.jobExecutionId = jobExecutionId;
-  }
-
-  public JobOperationParams.JobStopOrAbandonOperationParam getOperation() {
-    return operation;
+import java.text.DateFormat;
+import java.text.FieldPosition;
+import java.text.ParsePosition;
+import java.time.OffsetDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Date;
+
+public class ISO8601DateFormatter extends DateFormat {
+  @Override
+  public StringBuffer format(Date date, StringBuffer toAppendTo, FieldPosition fieldPosition) {
+    toAppendTo.append(DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(DateUtil.toOffsetDateTime(date)));
+    return toAppendTo;
   }
 
-  public void setOperation(JobOperationParams.JobStopOrAbandonOperationParam operation) {
-    this.operation = operation;
+  @Override
+  public Date parse(String source, ParsePosition pos) {
+    OffsetDateTime offsetDateTime = OffsetDateTime.parse(source, DateTimeFormatter.ISO_OFFSET_DATE_TIME);
+    pos.setIndex(pos.getIndex() + source.length());
+    return Date.from(offsetDateTime.toInstant());
   }
 }
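
A round-trip sketch for the new formatter (illustration only; the two-argument parse is used here so no checked ParseException is involved):

    import java.text.DateFormat;
    import java.text.ParsePosition;
    import java.util.Date;
    import org.apache.ambari.infra.model.ISO8601DateFormatter;

    class Iso8601Demo {
      public static void main(String[] args) {
        DateFormat f = new ISO8601DateFormatter();
        String text = f.format(new Date(0L));            // "1970-01-01T00:00Z"
        Date back = f.parse(text, new ParsePosition(0)); // parsed via ISO_OFFSET_DATE_TIME
        System.out.println(back.getTime());              // 0
      }
    }
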
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java
deleted file mode 100644
index cd34fef..0000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import org.springframework.batch.admin.web.JobInfo;
-
-import java.util.List;
-
-public class JobDetailsResponse {
-
-  private JobInfo jobInfo;
-  private List<JobInstanceDetailsResponse> jobInstanceDetailsResponseList;
-
-  public JobDetailsResponse() {
-  }
-
-  public JobDetailsResponse(JobInfo jobInfo, List<JobInstanceDetailsResponse> jobInstanceDetailsResponseList) {
-    this.jobInfo = jobInfo;
-    this.jobInstanceDetailsResponseList = jobInstanceDetailsResponseList;
-  }
-
-  public JobInfo getJobInfo() {
-    return jobInfo;
-  }
-
-  public void setJobInfo(JobInfo jobInfo) {
-    this.jobInfo = jobInfo;
-  }
-
-  public List<JobInstanceDetailsResponse> getJobInstanceDetailsResponseList() {
-    return jobInstanceDetailsResponseList;
-  }
-
-  public void setJobInstanceDetailsResponseList(List<JobInstanceDetailsResponse> jobInstanceDetailsResponseList) {
-    this.jobInstanceDetailsResponseList = jobInstanceDetailsResponseList;
-  }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java
index 695b57f..9e19344 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java
@@ -18,32 +18,30 @@
  */
 package org.apache.ambari.infra.model;
 
+import static java.util.Collections.unmodifiableList;
+
 import java.util.List;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
+
 public class JobExecutionDetailsResponse {
 
-  private JobExecutionInfoResponse jobExecutionInfoResponse;
+  @JsonProperty("jobExecution")
+  private final JobExecutionInfoResponse jobExecutionInfoResponse;
 
-  private List<StepExecutionInfoResponse> stepExecutionInfoList;
+  @JsonProperty("stepExecutionList")
+  private final List<StepExecutionInfoResponse> stepExecutionInfoList;
 
   public JobExecutionDetailsResponse(JobExecutionInfoResponse jobExecutionInfoResponse, List<StepExecutionInfoResponse> stepExecutionInfoList) {
     this.jobExecutionInfoResponse = jobExecutionInfoResponse;
-    this.stepExecutionInfoList = stepExecutionInfoList;
+    this.stepExecutionInfoList = unmodifiableList(stepExecutionInfoList);
   }
 
   public JobExecutionInfoResponse getJobExecutionInfoResponse() {
     return jobExecutionInfoResponse;
   }
 
-  public void setJobExecutionInfoResponse(JobExecutionInfoResponse jobExecutionInfoResponse) {
-    this.jobExecutionInfoResponse = jobExecutionInfoResponse;
-  }
-
   public List<StepExecutionInfoResponse> getStepExecutionInfoList() {
     return stepExecutionInfoList;
   }
-
-  public void setStepExecutionInfoList(List<StepExecutionInfoResponse> stepExecutionInfoList) {
-    this.stepExecutionInfoList = stepExecutionInfoList;
-  }
 }
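
With the @JsonProperty annotations above, Jackson serializes the fields under the shorter names; a hedged sketch of the renaming effect (standalone demo class, assumes a plain ObjectMapper on the classpath):

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.databind.ObjectMapper;

    class JsonPropertyDemo {
      static class Wrapper {
        @JsonProperty("jobExecution") // renames the field in the payload,
        public final String jobExecutionInfoResponse = "...";
      }

      public static void main(String[] args) throws Exception {
        // prints {"jobExecution":"..."} rather than {"jobExecutionInfoResponse":"..."}
        System.out.println(new ObjectMapper().writeValueAsString(new Wrapper()));
      }
    }
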
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java
index a7e4a4f..1034eb8 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java
@@ -18,111 +18,122 @@
  */
 package org.apache.ambari.infra.model;
 
-import org.apache.ambari.infra.model.wrapper.JobExecutionData;
-import org.springframework.batch.admin.web.JobParametersExtractor;
+import static java.util.Collections.unmodifiableList;
+import static org.apache.ambari.infra.model.DateUtil.toOffsetDateTime;
+
+import java.time.Duration;
+import java.time.OffsetDateTime;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.ambari.infra.json.DurationToStringConverter;
+import org.apache.ambari.infra.json.OffsetDateTimeToStringConverter;
 import org.springframework.batch.core.BatchStatus;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobInstance;
 import org.springframework.batch.core.converter.DefaultJobParametersConverter;
-import org.springframework.batch.core.converter.JobParametersConverter;
 
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Properties;
-import java.util.TimeZone;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+
+import io.swagger.annotations.ApiModelProperty;
 
 public class JobExecutionInfoResponse {
-  private Long id;
-  private int stepExecutionCount;
-  private Long jobId;
-  private String jobName;
-  private String startDate = "";
-  private String startTime = "";
-  private String duration = "";
-  private JobExecutionData jobExecutionData;
-  private Properties jobParameters;
-  private String jobParametersString;
-  private boolean restartable = false;
-  private boolean abandonable = false;
-  private boolean stoppable = false;
-  private final TimeZone timeZone;
-
-
-  public JobExecutionInfoResponse(JobExecution jobExecution, TimeZone timeZone) {
-    JobParametersConverter converter = new DefaultJobParametersConverter();
-    this.jobExecutionData = new JobExecutionData(jobExecution);
-    this.timeZone = timeZone;
-    this.id = jobExecutionData.getId();
-    this.jobId = jobExecutionData.getJobId();
-    this.stepExecutionCount = jobExecutionData.getStepExecutions().size();
-    this.jobParameters = converter.getProperties(jobExecutionData.getJobParameters());
-    this.jobParametersString = (new JobParametersExtractor()).fromJobParameters(jobExecutionData.getJobParameters());
-    JobInstance jobInstance = jobExecutionData.getJobInstance();
+  private static final DefaultJobParametersConverter DEFAULT_JOB_PARAMETERS_CONVERTER = new DefaultJobParametersConverter();
+
+  static {
+    DEFAULT_JOB_PARAMETERS_CONVERTER.setDateFormat(new ISO8601DateFormatter());
+  }
+
+  private final Long jobExecutionId;
+  private final Long jobInstanceId;
+  private final String jobName;
+  @JsonSerialize(converter = OffsetDateTimeToStringConverter.class)
+  private final OffsetDateTime creationTime;
+  @JsonSerialize(converter = OffsetDateTimeToStringConverter.class)
+  private final OffsetDateTime startTime;
+  @JsonSerialize(converter = OffsetDateTimeToStringConverter.class)
+  private final OffsetDateTime lastUpdatedTime;
+  @JsonSerialize(converter = OffsetDateTimeToStringConverter.class)
+  private final OffsetDateTime endTime;
+  @JsonSerialize(converter = DurationToStringConverter.class)
+  @ApiModelProperty(dataType = "java.lang.String", example = "PT5.311S")
+  private final Duration duration;
+  private final Properties jobParameters;
+  private final BatchStatus batchStatus;
+  @ApiModelProperty(example = "COMPLETED", allowableValues = "UNKNOWN, EXECUTING, COMPLETED, NOOP, FAILED, STOPPED")
+  private final String exitCode;
+  private final String exitDescription;
+  private final boolean restartable;
+  private final boolean abandonable;
+  private final boolean stoppable;
+  private final List<Throwable> failureExceptions;
+  private final String jobConfigurationName;
+
+
+  public JobExecutionInfoResponse(JobExecution jobExecution) {
+    this.jobExecutionId = jobExecution.getId();
+    this.jobInstanceId = jobExecution.getJobId();
+    this.jobParameters = DEFAULT_JOB_PARAMETERS_CONVERTER.getProperties(jobExecution.getJobParameters());
+    this.creationTime = toOffsetDateTime(jobExecution.getCreateTime());
+    this.startTime = toOffsetDateTime(jobExecution.getStartTime());
+    this.lastUpdatedTime = toOffsetDateTime(jobExecution.getLastUpdated());
+    this.endTime = toOffsetDateTime(jobExecution.getEndTime());
+    JobInstance jobInstance = jobExecution.getJobInstance();
+    this.batchStatus = jobExecution.getStatus();
+    this.restartable = batchStatus.isGreaterThan(BatchStatus.STOPPING) && batchStatus.isLessThan(BatchStatus.ABANDONED);
+    this.abandonable = batchStatus.isGreaterThan(BatchStatus.STARTED) && batchStatus != BatchStatus.ABANDONED;
+    this.stoppable = batchStatus.isLessThan(BatchStatus.STOPPING);
+
+    if (jobExecution.getExitStatus() != null) {
+      this.exitCode = jobExecution.getExitStatus().getExitCode();
+      this.exitDescription = jobExecution.getExitStatus().getExitDescription();
+    }
+    else {
+      this.exitCode = null;
+      this.exitDescription = null;
+    }
+
     if(jobInstance != null) {
       this.jobName = jobInstance.getJobName();
-      BatchStatus endTime = jobExecutionData.getStatus();
-      this.restartable = endTime.isGreaterThan(BatchStatus.STOPPING) && endTime.isLessThan(BatchStatus.ABANDONED);
-      this.abandonable = endTime.isGreaterThan(BatchStatus.STARTED) && endTime != BatchStatus.ABANDONED;
-      this.stoppable = endTime.isLessThan(BatchStatus.STOPPING);
     } else {
       this.jobName = "?";
     }
 
-    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
-    SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss");
-    SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss");
-
-    durationFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
-    timeFormat.setTimeZone(timeZone);
-    dateFormat.setTimeZone(timeZone);
-    if(jobExecutionData.getStartTime() != null) {
-      this.startDate = dateFormat.format(jobExecutionData.getStartTime());
-      this.startTime = timeFormat.format(jobExecutionData.getStartTime());
-      Date endTime1 = jobExecutionData.getEndTime() != null? jobExecutionData.getEndTime():new Date();
-      this.duration = durationFormat.format(new Date(endTime1.getTime() - jobExecutionData.getStartTime().getTime()));
+    if(startTime != null && endTime != null) {
+      this.duration = Duration.between(startTime, endTime);
+    }
+    else {
+      this.duration = null;
     }
-  }
 
-  public Long getId() {
-    return id;
+    this.failureExceptions = unmodifiableList(jobExecution.getFailureExceptions());
+    this.jobConfigurationName = jobExecution.getJobConfigurationName();
   }
 
-  public int getStepExecutionCount() {
-    return stepExecutionCount;
+  public Long getJobExecutionId() {
+    return jobExecutionId;
   }
 
-  public Long getJobId() {
-    return jobId;
+  public Long getJobInstanceId() {
+    return jobInstanceId;
   }
 
   public String getJobName() {
     return jobName;
   }
 
-  public String getStartDate() {
-    return startDate;
-  }
-
-  public String getStartTime() {
+  public OffsetDateTime getStartTime() {
     return startTime;
   }
 
-  public String getDuration() {
+  public Duration getDuration() {
     return duration;
   }
 
-  public JobExecutionData getJobExecutionData() {
-    return jobExecutionData;
-  }
-
   public Properties getJobParameters() {
     return jobParameters;
   }
 
-  public String getJobParametersString() {
-    return jobParametersString;
-  }
-
   public boolean isRestartable() {
     return restartable;
   }
@@ -135,7 +146,35 @@ public class JobExecutionInfoResponse {
     return stoppable;
   }
 
-  public TimeZone getTimeZone() {
-    return timeZone;
+  public BatchStatus getBatchStatus() {
+    return batchStatus;
+  }
+
+  public OffsetDateTime getCreationTime() {
+    return creationTime;
+  }
+
+  public OffsetDateTime getEndTime() {
+    return endTime;
+  }
+
+  public OffsetDateTime getLastUpdatedTime() {
+    return lastUpdatedTime;
+  }
+
+  public String getExitCode() {
+    return exitCode;
+  }
+
+  public String getExitDescription() {
+    return exitDescription;
+  }
+
+  public List<Throwable> getFailureExceptions() {
+    return this.failureExceptions;
+  }
+
+  public String getJobConfigurationName() {
+    return this.jobConfigurationName;
   }
 }
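
The restartable/abandonable/stoppable flags above are pure functions of BatchStatus; a small sketch of the same predicates evaluated for a FAILED execution (illustration only):

    import org.springframework.batch.core.BatchStatus;

    class StatusFlagsDemo {
      public static void main(String[] args) {
        BatchStatus s = BatchStatus.FAILED;
        // Same predicates as in JobExecutionInfoResponse:
        boolean restartable = s.isGreaterThan(BatchStatus.STOPPING) && s.isLessThan(BatchStatus.ABANDONED);
        boolean abandonable = s.isGreaterThan(BatchStatus.STARTED) && s != BatchStatus.ABANDONED;
        boolean stoppable   = s.isLessThan(BatchStatus.STOPPING);
        System.out.printf("restartable=%b abandonable=%b stoppable=%b%n",
            restartable, abandonable, stoppable); // true true false for FAILED
      }
    }
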
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java
index 3eab25f..fe36c5b 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java
@@ -22,6 +22,8 @@ import javax.validation.constraints.NotNull;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
 
+import io.swagger.annotations.ApiParam;
+
 public class JobExecutionRestartRequest {
 
   @PathParam("jobName")
@@ -34,6 +36,7 @@ public class JobExecutionRestartRequest {
 
   @QueryParam("operation")
   @NotNull
+  @ApiParam(required = true)
   private JobOperationParams.JobRestartOperationParam operation;
 
   public String getJobName() {
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
index b176f12..510e694 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
@@ -22,6 +22,8 @@ import javax.validation.constraints.NotNull;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
 
+import io.swagger.annotations.ApiParam;
+
 public class JobExecutionStopRequest {
 
   @PathParam("jobExecutionId")
@@ -30,6 +32,7 @@ public class JobExecutionStopRequest {
 
   @QueryParam("operation")
   @NotNull
+  @ApiParam(required = true)
   private JobOperationParams.JobStopOrAbandonOperationParam operation;
 
   public Long getJobExecutionId() {
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java
index af88654..9bb7994 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java
@@ -18,37 +18,31 @@
  */
 package org.apache.ambari.infra.model;
 
-import org.springframework.batch.core.JobInstance;
+import static java.util.Collections.unmodifiableList;
 
 import java.util.List;
 
-public class JobInstanceDetailsResponse {
+import org.springframework.batch.admin.web.JobInfo;
 
-  private JobInstance jobInstance;
+import com.fasterxml.jackson.annotation.JsonProperty;
 
-  private List<JobExecutionInfoResponse> jobExecutionInfoResponseList;
 
-  public JobInstanceDetailsResponse() {
-  }
+public class JobInstanceDetailsResponse {
 
-  public JobInstanceDetailsResponse(JobInstance jobInstance, List<JobExecutionInfoResponse> jobExecutionInfoResponseList) {
-    this.jobInstance = jobInstance;
-    this.jobExecutionInfoResponseList = jobExecutionInfoResponseList;
-  }
+  private final JobInfo jobInfo;
+  @JsonProperty("jobExecutionList")
+  private final List<JobExecutionInfoResponse> jobExecutionInfoResponseList;
 
-  public JobInstance getJobInstance() {
-    return jobInstance;
+  public JobInstanceDetailsResponse(JobInfo jobInfo, List<JobExecutionInfoResponse> jobExecutionInfoResponseList) {
+    this.jobInfo = jobInfo;
+    this.jobExecutionInfoResponseList = unmodifiableList(jobExecutionInfoResponseList);
   }
 
-  public void setJobInstance(JobInstance jobInstance) {
-    this.jobInstance = jobInstance;
+  public JobInfo getJobInfo() {
+    return jobInfo;
   }
 
   public List<JobExecutionInfoResponse> getJobExecutionInfoResponseList() {
     return jobExecutionInfoResponseList;
   }
-
-  public void setJobExecutionInfoResponseList(List<JobExecutionInfoResponse> jobExecutionInfoResponseList) {
-    this.jobExecutionInfoResponseList = jobExecutionInfoResponseList;
-  }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java
index 905a4fa..5c760f6 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java
@@ -22,14 +22,17 @@ import javax.validation.constraints.NotNull;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
 
+import io.swagger.annotations.ApiParam;
+
 public class JobInstanceStartRequest {
 
   @PathParam("jobName")
   @NotNull
+  @ApiParam(required = true)
   private String jobName;
 
   @QueryParam("params")
-  String params;
+  private String params;
 
   public String getJobName() {
     return jobName;
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java
index 0e67a87..9ce56af 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java
@@ -18,23 +18,22 @@
  */
 package org.apache.ambari.infra.model;
 
+import static java.util.Collections.unmodifiableMap;
+
 import java.util.Map;
 
 public class StepExecutionContextResponse {
 
-  private Map<String, Object> executionContextMap;
-
-  private Long jobExecutionId;
+  private final Map<String, Object> executionContextMap;
 
-  private Long stepExecutionId;
+  private final Long jobExecutionId;
 
-  private String stepName;
+  private final Long stepExecutionId;
 
-  public StepExecutionContextResponse() {
-  }
+  private final String stepName;
 
   public StepExecutionContextResponse(Map<String, Object> executionContextMap, Long jobExecutionId, Long stepExecutionId, String stepName) {
-    this.executionContextMap = executionContextMap;
+    this.executionContextMap = unmodifiableMap(executionContextMap);
     this.jobExecutionId = jobExecutionId;
     this.stepExecutionId = stepExecutionId;
     this.stepName = stepName;
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java
index ed04767..011eaea 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java
@@ -18,98 +18,101 @@
  */
 package org.apache.ambari.infra.model;
 
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import org.apache.ambari.infra.model.wrapper.StepExecutionData;
-import org.springframework.batch.core.JobExecution;
+import static org.apache.ambari.infra.model.DateUtil.toOffsetDateTime;
+
+import java.time.Duration;
+import java.time.OffsetDateTime;
+
+import org.apache.ambari.infra.json.DurationToStringConverter;
+import org.apache.ambari.infra.json.OffsetDateTimeToStringConverter;
+import org.springframework.batch.core.BatchStatus;
 import org.springframework.batch.core.StepExecution;
 
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.TimeZone;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-public class StepExecutionInfoResponse {
-  private Long id;
-  private Long jobExecutionId;
-  private String jobName;
-  private String name;
-  private String startDate = "-";
-  private String startTime = "-";
-  private String duration = "-";
-  private StepExecutionData stepExecutionData;
-  private long durationMillis;
-
-  public StepExecutionInfoResponse(String jobName, Long jobExecutionId, String name, TimeZone timeZone) {
-    this.jobName = jobName;
-    this.jobExecutionId = jobExecutionId;
-    this.name = name;
-    this.stepExecutionData = new StepExecutionData(new StepExecution(name, new JobExecution(jobExecutionId)));
-  }
+import io.swagger.annotations.ApiModelProperty;
 
-  public StepExecutionInfoResponse(StepExecution stepExecution, TimeZone timeZone) {
-    this.stepExecutionData = new StepExecutionData(stepExecution);
-    this.id = stepExecutionData.getId();
-    this.name = stepExecutionData.getStepName();
-    this.jobName = stepExecutionData.getJobExecution() != null && stepExecutionData.getJobExecution().getJobInstance() != null? stepExecutionData.getJobExecution().getJobInstance().getJobName():"?";
-    this.jobExecutionId = stepExecutionData.getJobExecutionId();
-    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
-    SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss");
-    SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss");
-
-    durationFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
-    timeFormat.setTimeZone(timeZone);
-    dateFormat.setTimeZone(timeZone);
-    if(stepExecutionData.getStartTime() != null) {
-      this.startDate = dateFormat.format(stepExecutionData.getStartTime());
-      this.startTime = timeFormat.format(stepExecutionData.getStartTime());
-      Date endTime = stepExecutionData.getEndTime() != null? stepExecutionData.getEndTime():new Date();
-      this.durationMillis = endTime.getTime() - stepExecutionData.getStartTime().getTime();
-      this.duration = durationFormat.format(new Date(this.durationMillis));
+public class StepExecutionInfoResponse {
+  private final Long stepExecutionId;
+  private final Long jobExecutionId;
+  private final String jobName;
+  private final String stepName;
+  @JsonSerialize(converter = OffsetDateTimeToStringConverter.class)
+  private final OffsetDateTime startTime;
+  @JsonSerialize(converter = OffsetDateTimeToStringConverter.class)
+  private final OffsetDateTime endTime;
+  @JsonSerialize(converter = DurationToStringConverter.class)
+  @ApiModelProperty(dataType = "java.lang.String", example = "PT5.311S")
+  private final Duration duration;
+  private final BatchStatus batchStatus;
+  @ApiModelProperty(example = "COMPLETED", allowableValues = "UNKNOWN, EXECUTING, COMPLETED, NOOP, FAILED, STOPPED")
+  private final String exitCode;
+  private final String exitDescription;
+
+
+  public StepExecutionInfoResponse(StepExecution stepExecution) {
+    this.stepExecutionId = stepExecution.getId();
+    this.stepName = stepExecution.getStepName();
+    this.jobName = stepExecution.getJobExecution() != null && stepExecution.getJobExecution().getJobInstance() != null ? stepExecution.getJobExecution().getJobInstance().getJobName() : "?";
+    this.jobExecutionId = stepExecution.getJobExecutionId();
+    this.startTime = toOffsetDateTime(stepExecution.getStartTime());
+    this.endTime = toOffsetDateTime(stepExecution.getEndTime());
+
+    if(this.startTime != null && this.endTime != null) {
+      this.duration = Duration.between(this.startTime, this.endTime);
+    }
+    else {
+      this.duration = null;
     }
 
+    this.batchStatus = stepExecution.getStatus();
+    if (stepExecution.getExitStatus() != null) {
+      this.exitCode = stepExecution.getExitStatus().getExitCode();
+      this.exitDescription = stepExecution.getExitStatus().getExitDescription();
+    }
+    else {
+      this.exitCode = null;
+      this.exitDescription = null;
+    }
   }
 
-  public Long getId() {
-    return this.id;
+  public Long getStepExecutionId() {
+    return this.stepExecutionId;
   }
 
   public Long getJobExecutionId() {
     return this.jobExecutionId;
   }
 
-  public String getName() {
-    return this.name;
+  public String getStepName() {
+    return this.stepName;
   }
 
   public String getJobName() {
     return this.jobName;
   }
 
-  public String getStartDate() {
-    return this.startDate;
-  }
-
-  public String getStartTime() {
-    return this.startTime;
+  public OffsetDateTime getStartTime() {
+    return startTime;
   }
 
-  public String getDuration() {
-    return this.duration;
+  public OffsetDateTime getEndTime() {
+    return endTime;
   }
 
-  public long getDurationMillis() {
-    return this.durationMillis;
+  public Duration getDuration() {
+    return duration;
   }
 
-  public String getStatus() {
-    return this.id != null?this.stepExecutionData.getStatus().toString():"NONE";
+  public BatchStatus getBatchStatus() {
+    return batchStatus;
   }
 
   public String getExitCode() {
-    return this.id != null?this.stepExecutionData.getExitStatus().getExitCode():"NONE";
+    return exitCode;
   }
 
-  @JsonIgnore
-  public StepExecutionData getStepExecution() {
-    return this.stepExecutionData;
+  public String getExitDescription() {
+    return exitDescription;
   }
 }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java
index 26f9ed4..d2404e5 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java
@@ -21,16 +21,16 @@ package org.apache.ambari.infra.model;
 import org.springframework.batch.admin.history.StepExecutionHistory;
 import org.springframework.batch.admin.web.StepExecutionProgress;
 
-public class StepExecutionProgressResponse {
+import com.fasterxml.jackson.annotation.JsonProperty;
 
-  private StepExecutionProgress stepExecutionProgress;
+public class StepExecutionProgressResponse {
 
-  private StepExecutionHistory stepExecutionHistory;
+  private final StepExecutionProgress stepExecutionProgress;
 
-  private StepExecutionInfoResponse stepExecutionInfoResponse;
+  private final StepExecutionHistory stepExecutionHistory;
 
-  public StepExecutionProgressResponse() {
-  }
+  @JsonProperty("stepExecution")
+  private final StepExecutionInfoResponse stepExecutionInfoResponse;
 
   public StepExecutionProgressResponse(StepExecutionProgress stepExecutionProgress, StepExecutionHistory stepExecutionHistory,
                                        StepExecutionInfoResponse stepExecutionInfoResponse) {
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java
deleted file mode 100644
index 28e262a..0000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model.wrapper;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.google.common.collect.Lists;
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobInstance;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.item.ExecutionContext;
-
-import java.util.Collection;
-import java.util.Date;
-import java.util.List;
-
-/**
- * Wrapper for #{{@link JobExecution}}
- */
-public class JobExecutionData {
-
-  private JobExecution jobExecution;
-
-  public JobExecutionData(JobExecution jobExecution) {
-    this.jobExecution = jobExecution;
-  }
-
-  @JsonIgnore
-  public JobExecution getJobExecution() {
-    return jobExecution;
-  }
-
-  @JsonIgnore
-  public Collection<StepExecution> getStepExecutions() {
-    return jobExecution.getStepExecutions();
-  }
-
-  public JobParameters getJobParameters() {
-    return jobExecution.getJobParameters();
-  }
-
-  public JobInstance getJobInstance() {
-    return jobExecution.getJobInstance();
-  }
-
-  public Collection<StepExecutionData> getStepExecutionDataList() {
-    List<StepExecutionData> stepExecutionDataList = Lists.newArrayList();
-    Collection<StepExecution> stepExecutions = getStepExecutions();
-    if (stepExecutions != null) {
-      for (StepExecution stepExecution : stepExecutions) {
-        stepExecutionDataList.add(new StepExecutionData(stepExecution));
-      }
-    }
-    return stepExecutionDataList;
-  }
-
-  public BatchStatus getStatus() {
-    return jobExecution.getStatus();
-  }
-
-  public Date getStartTime() {
-    return jobExecution.getStartTime();
-  }
-
-  public Date getCreateTime() {
-    return jobExecution.getCreateTime();
-  }
-
-  public Date getEndTime() {
-    return jobExecution.getEndTime();
-  }
-
-  public Date getLastUpdated() {
-    return jobExecution.getLastUpdated();
-  }
-
-  public ExitStatus getExitStatus() {
-    return jobExecution.getExitStatus();
-  }
-
-  public ExecutionContext getExecutionContext() {
-    return jobExecution.getExecutionContext();
-  }
-
-  public List<Throwable> getFailureExceptions() {
-    return jobExecution.getFailureExceptions();
-  }
-
-  public String getJobConfigurationName() {
-    return jobExecution.getJobConfigurationName();
-  }
-
-  public Long getId() {
-    return jobExecution.getId();
-  }
-
-  public Long getJobId() {
-    return jobExecution.getJobId();
-  }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java
deleted file mode 100644
index 26552ae..0000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model.wrapper;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.item.ExecutionContext;
-
-import java.util.Date;
-import java.util.List;
-
-/**
- * Wrapper for #{{@link StepExecution}}
- */
-public class StepExecutionData {
-
-  @JsonIgnore
-  private final JobExecution jobExecution;
-
-  @JsonIgnore
-  private final StepExecution stepExecution;
-
-
-  public StepExecutionData(StepExecution stepExecution) {
-    this.stepExecution = stepExecution;
-    this.jobExecution = stepExecution.getJobExecution();
-  }
-
-  @JsonIgnore
-  public JobExecution getJobExecution() {
-    return jobExecution;
-  }
-
-  @JsonIgnore
-  public StepExecution getStepExecution() {
-    return stepExecution;
-  }
-
-  public String getStepName() {
-    return stepExecution.getStepName();
-  }
-
-  public int getReadCount() {
-    return stepExecution.getReadCount();
-  }
-
-  public BatchStatus getStatus() {
-    return stepExecution.getStatus();
-  }
-
-  public int getWriteCount() {
-    return stepExecution.getWriteCount();
-  }
-
-  public int getCommitCount() {
-    return stepExecution.getCommitCount();
-  }
-
-  public int getRollbackCount() {
-    return stepExecution.getRollbackCount();
-  }
-
-  public int getReadSkipCount() {
-    return stepExecution.getReadSkipCount();
-  }
-
-  public int getProcessSkipCount() {
-    return stepExecution.getProcessSkipCount();
-  }
-
-  public Date getStartTime() {
-    return stepExecution.getStartTime();
-  }
-
-  public int getWriteSkipCount() {
-    return stepExecution.getWriteSkipCount();
-  }
-
-  public Date getEndTime() {
-    return stepExecution.getEndTime();
-  }
-
-  public Date getLastUpdated() {
-    return stepExecution.getLastUpdated();
-  }
-
-  public ExecutionContext getExecutionContext() {
-    return stepExecution.getExecutionContext();
-  }
-
-  public ExitStatus getExitStatus() {
-    return stepExecution.getExitStatus();
-  }
-
-  public boolean isTerminateOnly() {
-    return stepExecution.isTerminateOnly();
-  }
-
-  public int getFilterCount() {
-    return stepExecution.getFilterCount();
-  }
-
-  public List<Throwable> getFailureExceptions() {
-    return stepExecution.getFailureExceptions();
-  }
-
-  public Long getId() {
-    return stepExecution.getId();
-  }
-
-  public Long getJobExecutionId() {
-    return stepExecution.getJobExecutionId();
-  }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java
deleted file mode 100644
index 18dfdd9..0000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.rest;
-
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.infra.doc.InfraManagerApiDocStorage;
-import org.springframework.context.annotation.Scope;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-@Path("swagger.{type:json|yaml}")
-@Named
-@Scope("request")
-public class ApiDocResource {
-
-  @Inject
-  private InfraManagerApiDocStorage infraManagerApiDocStorage;
-
-  @GET
-  @Produces({MediaType.APPLICATION_JSON, "application/yaml"})
-  @ApiOperation(value = "The swagger definition in either JSON or YAML", hidden = true)
-  public Response swaggerDefinitionResponse(@PathParam("type") String type) {
-    Response response = Response.status(404).build();
-    if (infraManagerApiDocStorage.getSwagger() != null) {
-      if ("yaml".equalsIgnoreCase(type)) {
-        response = Response.ok().entity(infraManagerApiDocStorage.getSwaggerYaml()).type("application/yaml").build();
-      } else {
-        response = Response.ok().entity(infraManagerApiDocStorage.getSwagger()).build();
-      }
-    }
-    return response;
-  }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
index 079cce3..9f47bdd 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
@@ -19,9 +19,17 @@
 package org.apache.ambari.infra.rest;
 
 
-import com.google.common.collect.Maps;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.util.Map;
+
+import javax.batch.operations.JobExecutionAlreadyCompleteException;
+import javax.inject.Named;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
 import org.springframework.batch.core.JobParametersInvalidException;
 import org.springframework.batch.core.launch.JobExecutionNotFailedException;
@@ -38,19 +46,13 @@ import org.springframework.batch.core.repository.JobRestartException;
 import org.springframework.batch.core.step.NoSuchStepException;
 import org.springframework.web.bind.MethodArgumentNotValidException;
 
-import javax.batch.operations.JobExecutionAlreadyCompleteException;
-import javax.inject.Named;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-import java.util.Map;
+import com.google.common.collect.Maps;
 
 @Named
 @Provider
 public class JobExceptionMapper implements ExceptionMapper<Throwable> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(JobExceptionMapper.class);
+  private static final Logger logger = LogManager.getLogger(JobExceptionMapper.class);
 
   private static final Map<Class, Response.Status> exceptionStatusCodeMap = Maps.newHashMap();
 
@@ -75,13 +77,13 @@ public class JobExceptionMapper implements ExceptionMapper<Throwable> {
 
   @Override
   public Response toResponse(Throwable throwable) {
-    LOG.error("REST Exception occurred:", throwable);
+    logger.error("REST Exception occurred:", throwable);
     Response.Status status = Response.Status.INTERNAL_SERVER_ERROR;
 
     for (Map.Entry<Class, Response.Status> entry : exceptionStatusCodeMap.entrySet()) {
       if (throwable.getClass().isAssignableFrom(entry.getKey())) {
         status = entry.getValue();
-        LOG.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode());
+        logger.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode());
         break;
       }
     }
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
index 502057e..f5de3c0 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
@@ -18,17 +18,29 @@
  */
 package org.apache.ambari.infra.rest;
 
-import com.google.common.base.Splitter;
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiOperation;
+import java.util.List;
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.validation.Valid;
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.BeanParam;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+
 import org.apache.ambari.infra.manager.JobManager;
 import org.apache.ambari.infra.model.ExecutionContextResponse;
-import org.apache.ambari.infra.model.JobDetailsResponse;
 import org.apache.ambari.infra.model.JobExecutionDetailsResponse;
 import org.apache.ambari.infra.model.JobExecutionInfoResponse;
 import org.apache.ambari.infra.model.JobExecutionRequest;
 import org.apache.ambari.infra.model.JobExecutionRestartRequest;
 import org.apache.ambari.infra.model.JobExecutionStopRequest;
+import org.apache.ambari.infra.model.JobInstanceDetailsResponse;
 import org.apache.ambari.infra.model.JobInstanceStartRequest;
 import org.apache.ambari.infra.model.JobRequest;
 import org.apache.ambari.infra.model.PageRequest;
@@ -36,8 +48,8 @@ import org.apache.ambari.infra.model.StepExecutionContextResponse;
 import org.apache.ambari.infra.model.StepExecutionInfoResponse;
 import org.apache.ambari.infra.model.StepExecutionProgressResponse;
 import org.apache.ambari.infra.model.StepExecutionRequest;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
 import org.springframework.batch.admin.web.JobInfo;
 import org.springframework.batch.core.JobParametersBuilder;
@@ -51,26 +63,18 @@ import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteExcep
 import org.springframework.batch.core.repository.JobRestartException;
 import org.springframework.context.annotation.Scope;
 
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.validation.Valid;
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.BeanParam;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import java.util.List;
-import java.util.Set;
+import com.google.common.base.Splitter;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.Authorization;
 
-@Api(value = "jobs", description = "Job operations")
+@Api(value = "jobs", description = "Job operations", authorizations = {@Authorization(value = "basicAuth")})
 @Path("jobs")
 @Named
 @Scope("request")
 public class JobResource {
-  private static final Logger LOG = LoggerFactory.getLogger(JobResource.class);
+  private static final Logger logger = LogManager.getLogger(JobResource.class);
 
   @Inject
   private JobManager jobManager;
@@ -94,7 +98,7 @@ public class JobResource {
     String params = request.getParams();
     JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
     if (params != null) {
-      LOG.info("Parsing parameters of job {} '{}'", jobName, params);
+      logger.info("Parsing parameters of job {} '{}'", jobName, params);
       Splitter.on(',')
               .trimResults()
               .withKeyValueSeparator(Splitter.on('=').limit(2).trimResults())
@@ -116,7 +120,7 @@ public class JobResource {
   @Produces({"application/json"})
   @Path("{jobName}/info")
   @ApiOperation("Get job details by job name.")
-  public JobDetailsResponse getJobDetails(@BeanParam @Valid JobRequest jobRequest) throws NoSuchJobException {
+  public List<JobInstanceDetailsResponse> getJobDetails(@BeanParam @Valid JobRequest jobRequest) throws NoSuchJobException {
     return jobManager.getJobDetails(jobRequest.getJobName(), jobRequest.getPage(), jobRequest.getSize());
   }
 
@@ -132,7 +136,7 @@ public class JobResource {
   @Produces({"application/json"})
   @Path("/executions/{jobExecutionId}")
   @ApiOperation("Get job and step details for job execution instance.")
-  public JobExecutionDetailsResponse getExectionInfo(@PathParam("jobExecutionId") @Valid Long jobExecutionId) throws NoSuchJobExecutionException {
+  public JobExecutionDetailsResponse getExecutionInfo(@PathParam("jobExecutionId") @Valid Long jobExecutionId) throws NoSuchJobExecutionException {
     return jobManager.getExecutionInfo(jobExecutionId);
   }
 
@@ -165,8 +169,8 @@ public class JobResource {
   @GET
   @Produces({"application/json"})
   @Path("/{jobName}/{jobInstanceId}/executions")
-  @ApiOperation("Get execution for job instance.")
-  public List<JobExecutionInfoResponse> getExecutionsForInstance(@BeanParam @Valid JobExecutionRequest request) throws
+  @ApiOperation("Get execution of job instance.")
+  public List<JobExecutionInfoResponse> getExecutionsOfInstance(@BeanParam @Valid JobExecutionRequest request) throws
           NoSuchJobException, NoSuchJobInstanceException {
     return jobManager.getExecutionsForJobInstance(request.getJobName(), request.getJobInstanceId());
   }
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh
index 9a371fd..dc39b06 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh
@@ -17,20 +17,20 @@
 # Extend with java options or system properties. e.g.: INFRA_MANAGER_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=5007,server=y,suspend=n"
 export INFRA_MANAGER_OPTS=""
 
-# Log Search debug options
-# export INFRA_MANAGER_DEBUG=true
-# export INFRA_MANAGER_DEBUG_SUSPEND=n
+# Infra Manager debug options
+#export INFRA_MANAGER_DEBUG=true
+#export INFRA_MANAGER_DEBUG_SUSPEND=n
 export INFRA_MANAGER_DEBUG_PORT=5005
 
-# Log Search memory
+# Infra Manager memory
 # export INFRA_MANAGER_JAVA_MEM="-Xmx1024m"
 
-# export LOG_PATH=/var/log/ambari-logsearch-logfeeder/
-# export LOG_FILE=logsearch.log
+#export LOG_PATH=/var/log/ambari-infra-manager/
+#export LOG_FILE=infra-manager.log
 
 # Pid file of the application
-# export INFRA_MANAGER_PID_DIR=/var/run/ambari-infra-manager
-# export INFRA_MANAGER_PID_FILE=infra-manager.pid
+#export INFRA_MANAGER_PID_DIR=/var/run/ambari-infra-manager
+#export INFRA_MANAGER_PID_FILE=infra-manager.pid
 
 # SSL settings"
 # export INFRA_MANAGER_SSL="true"
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
index a0712ba..c3ec9d0 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
@@ -17,10 +17,10 @@ infra-manager.batch.db.file=job-repository.db
 infra-manager.batch.db.init=false
 infra-manager.batch.db.username=admin
 infra-manager.batch.db.password=admin
-management.security.enabled=false
-management.health.solr.enabled=false
 infra-manager.server.data.folder=/tmp/ambariInfraManager
+infra-manager.admin-user.password=admin
 
+# Archive Service Logs
 infra-manager.jobs.solr_data_archiving.archive_service_logs.enabled=true
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.zoo_keeper_connection_string=zookeeper:2181
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.collection=hadoop_logs
@@ -28,47 +28,58 @@ infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.query_text=logt
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[0]=logtime
 infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=id
+#infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
 infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=100
 infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=150
-infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=LOCAL
+infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=HDFS
+infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_destination_directory=/archives/service_logs
 infra-manager.jobs.solr_data_archiving.archive_service_logs.local_destination_directory=/tmp/ambariInfraManager
 infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_column=logtime
 infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
-infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.enabled=true
+infra-manager.jobs.solr_data_archiving.archive_service_logs.ttl=PT24H
+infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.enabled=false
 infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.cron=0 * * * * ?
-infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.intervalEndDelta=PT24H
+# Archive Audit Logs
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.enabled=true
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.collection=audit_logs
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=logtime:[${start} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=logtime
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=evtTime:[${start} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(evtTime:${evtTime} AND id:{${id} TO *]) OR evtTime:{${evtTime} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=evtTime
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[1]=id
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=evtTime:[${start.evtTime} TO ${end.evtTime}} OR (evtTime:${end.evtTime} AND id:[* TO ${end.id}])
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.read_block_size=100
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.write_block_size=150
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=S3
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=HDFS
 # TODO: logtime may not be enough: the same filename can be generated when more than write_block_size docs have the same logtime value
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=logtime
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=evtTime
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_endpoint=hdfs://namenode:9000/
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/test_audit_logs
-#infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_access_file=<any>.csv
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_key_prefix=solr_archive_
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_bucket_name=testbucket
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_endpoint=http://fakes3:4569
-# TODO: configure ranger audit logs
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.zoo_keeper_connection_string=zookeeper:2181
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.read_block_size=100
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.write_block_size=150
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.file_name_suffix_column=logtime
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.destination_directory_path=/tmp/ambariInfraManager
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.collection=hadoop_logs
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.query_text=logtime:[* TO "${end}"]
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.filter_query_text=(logtime:"${logtime}" AND id:{"${id}" TO *]) OR logtime:{"${logtime}" TO "${end}"]
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[0]=logtime
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[1]=id
+#infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_endpoint=hdfs://namenode:9000/
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/archives/audit_logs
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.local_destination_directory=/tmp/ambariInfraManager
+# Archive Ranger Audit Logs
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.enabled=true
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.collection=ranger_audits
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.query_text=evtTime:[${start} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.filter_query_text=(evtTime:${evtTime} AND id:{${id} TO *]) OR evtTime:{${evtTime} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.sort_column[0]=evtTime
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.sort_column[1]=id
+#infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.delete_query_text=evtTime:[${start.evtTime} TO ${end.evtTime}} OR (evtTime:${end.evtTime} AND id:[* TO ${end.id}])
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.read_block_size=10000
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.write_block_size=10000
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.destination=LOCAL
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.local_destination_directory=/tmp/ambariInfraManager
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.file_name_suffix_column=evtTime
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.ttl=PT24H
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.scheduling.enabled=false
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.scheduling.cron=0 * * * * ?
+
 infra-manager.jobs.solr_data_deleting.delete_audit_logs.enabled=true
 infra-manager.jobs.solr_data_deleting.delete_audit_logs.zoo_keeper_connection_string=zookeeper:2181
 infra-manager.jobs.solr_data_deleting.delete_audit_logs.collection=audit_logs
-infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=logtime
+infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=evtTime
+infra-manager.jobs.clean-up.ttl=PT240H
+infra-manager.jobs.clean-up.scheduling.enabled=true
+infra-manager.jobs.clean-up.scheduling.cron=0 * * * * ?
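
The `ttl` values above (PT24H, PT240H) are ISO-8601 duration strings. Assuming they are bound to java.time.Duration, which that syntax suggests, they can be sanity-checked like this:

    import java.time.Duration;

    public class TtlSketch {
      public static void main(String[] args) {
        Duration archiveTtl = Duration.parse("PT24H");   // archive job TTL: 24 hours
        Duration cleanUpTtl = Duration.parse("PT240H");  // clean-up TTL: 240 hours
        System.out.println(archiveTtl.toHours());  // 24
        System.out.println(cleanUpTtl.toDays());   // 10
      }
    }
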
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
index 5ac32e3..922ce0c 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
@@ -72,7 +72,13 @@ else
   INFRA_MANAGER_GC_LOGFILE="$LOG_PATH_WITHOUT_SLASH/$INFRA_MANAGER_GC_LOGFILE"
 fi
 
-INFRA_MANAGER_GC_OPTS="-XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$INFRA_MANAGER_GC_LOGFILE"
+java_version=$($JVM -version 2>&1 | grep 'version' | cut -d'"' -f2 | cut -d'.' -f2)
+if [ "$java_version" = "8" ]; then
+  INFRA_MANAGER_GC_OPTS="-XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$INFRA_MANAGER_GC_LOGFILE"
+else
+  INFRA_MANAGER_GC_OPTS="-Xlog:gc*:file=$INFRA_MANAGER_GC_LOGFILE:time"
+fi
+
 
 function print_usage() {
   cat << EOF
@@ -138,7 +144,12 @@ function start() {
   INFRA_MANAGER_DEBUG_PORT=${INFRA_MANAGER_DEBUG_PORT:-"5005"}
 
   if [ "$INFRA_MANAGER_DEBUG" = "true" ]; then
-    INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$INFRA_MANAGER_DEBUG_PORT,server=y,suspend=$INFRA_MANAGER_DEBUG_SUSPEND "
+    if [ "$java_version" = "8" ]; then
+      INFRA_MANAGER_DEBUG_ADDRESS=$INFRA_MANAGER_DEBUG_PORT
+    else
+      INFRA_MANAGER_DEBUG_ADDRESS="*:$INFRA_MANAGER_DEBUG_PORT"
+    fi
+    INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$INFRA_MANAGER_DEBUG_ADDRESS,server=y,suspend=$INFRA_MANAGER_DEBUG_SUSPEND "
   fi
 
   if [ "$INFRA_MANAGER_SSL" = "true" ]; then
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml b/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
index d3db3d7..80819c8 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
@@ -24,7 +24,7 @@
       <Layout type="PatternLayout" pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36}:%L - %msg%n" />
     </Appender>
     <RollingFile name="File" fileName="${logging.file}" filePattern="${logging.file}-%i-%d{yyyy-MM-dd}">
-      <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36}:%L - %msg%n" />
+      <PatternLayout pattern="%d{ISO8601} [%t] %-5p %C{6} (%F:%L) - %m%n" />
       <Policies>
         <TimeBasedTriggeringPolicy />
         <SizeBasedTriggeringPolicy size="10 MB"/>
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html b/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html
index 4d261e7..69478fd 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html
@@ -1,4 +1,4 @@
-<!DOCTYPE html>
+<!-- HTML for static distribution bundle build -->
 <!--
  Licensed to the Apache Software Foundation (ASF) under one or more
  contributor license agreements.  See the NOTICE file distributed with
@@ -15,122 +15,63 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 -->
-<html>
+<!DOCTYPE html>
+<html lang="en">
 <head>
     <meta charset="UTF-8">
     <title>Infra Manager REST API</title>
-    <link rel="icon" type="image/png" href="swagger-ui/2.2.2/images/favicon-32x32.png" sizes="32x32" />
-    <link rel="icon" type="image/png" href="swagger-ui/2.2.2/images/favicon-16x16.png" sizes="16x16" />
-    <link href='swagger-ui/2.2.2/css/typography.css' media='screen' rel='stylesheet' type='text/css'/>
-    <link href='swagger-ui/2.2.2/css/reset.css' media='screen' rel='stylesheet' type='text/css'/>
-    <link href='swagger-ui/2.2.2/css/screen.css' media='screen' rel='stylesheet' type='text/css'/>
-    <link href='swagger-ui/2.2.2/css/reset.css' media='print' rel='stylesheet' type='text/css'/>
-    <link href='swagger-ui/2.2.2/css/print.css' media='print' rel='stylesheet' type='text/css'/>
-
-    <script src='swagger-ui/2.2.2/lib/object-assign-pollyfill.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/jquery-1.8.0.min.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/jquery.slideto.min.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/jquery.wiggle.min.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/jquery.ba-bbq.min.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/handlebars-4.0.5.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/lodash.min.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/backbone-min.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/swagger-ui.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/highlight.9.1.0.pack.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/highlight.9.1.0.pack_extended.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/jsoneditor.min.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/marked.js' type='text/javascript'></script>
-    <script src='swagger-ui/2.2.2/lib/swagger-oauth.js' type='text/javascript'></script>
-
-    <!-- Some basic translations -->
-    <!-- <script src='lang/translator.js' type='text/javascript'></script> -->
-    <!-- <script src='lang/ru.js' type='text/javascript'></script> -->
-    <!-- <script src='lang/en.js' type='text/javascript'></script> -->
-
-    <script type="text/javascript">
-        $(function () {
-            var url = window.location.search.match(/url=([^&]+)/);
-            if (url && url.length > 1) {
-                url = decodeURIComponent(url[1]);
-            } else {
-                var urlPrefix = location.protocol +'//'+ location.hostname+(location.port ? ':'+location.port: '');
-                url = urlPrefix + "/api/v1/swagger.yaml";
-            }
+    <link rel="stylesheet" type="text/css" href="swagger-ui/3.19.0/swagger-ui.css" >
+    <link rel="icon" type="image/png" href="swagger-ui/3.19.0/images/favicon-32x32.png" sizes="32x32" />
+    <link rel="icon" type="image/png" href="swagger-ui/3.19.0/images/favicon-16x16.png" sizes="16x16" />
+    <style>
+      html
+      {
+        box-sizing: border-box;
+        overflow: -moz-scrollbars-vertical;
+        overflow-y: scroll;
+      }
 
-            hljs.configure({
-                highlightSizeThreshold: 5000
-            });
+      *,
+      *:before,
+      *:after
+      {
+        box-sizing: inherit;
+      }
 
-            // Pre load translate...
-            if(window.SwaggerTranslator) {
-                window.SwaggerTranslator.translate();
-            }
-            window.swaggerUi = new SwaggerUi({
-                url: url,
-                dom_id: "swagger-ui-container",
-                supportedSubmitMethods: ['get', 'post', 'put', 'delete', 'patch'],
-                onComplete: function(swaggerApi, swaggerUi){
-                    if(typeof initOAuth == "function") {
-                        initOAuth({
-                            clientId: "your-client-id",
-                            clientSecret: "your-client-secret-if-required",
-                            realm: "your-realms",
-                            appName: "your-app-name",
-                            scopeSeparator: " ",
-                            additionalQueryStringParams: {}
-                        });
-                    }
-
-                    if(window.SwaggerTranslator) {
-                        window.SwaggerTranslator.translate();
-                    }
-                },
-                onFailure: function(data) {
-                    log("Unable to Load SwaggerUI");
-                },
-                docExpansion: "none",
-                jsonEditor: false,
-                defaultModelRendering: 'schema',
-                showRequestHeaders: false
-            });
-
-            function addApiKeyAuthorization(){
-                var username = encodeURIComponent($('#input_username')[0].value);
-                var password = encodeURIComponent($('#input_password')[0].value);
-                if (username && username.trim() != "" && password && password != "") {
-                    var apiKeyAuth = new SwaggerClient.PasswordAuthorization("Authorization", username, password);
-                    window.swaggerUi.api.clientAuthorizations.add("key", apiKeyAuth);
-                    log("added authorization header: " + 'Basic ' + btoa(username + ':' + password));
-                }
-            }
-
-            $('#input_username, #input_password').change(addApiKeyAuthorization);
+      body
+      {
+        margin:0;
+        background: #fafafa;
+      }
+    </style>
+</head>
 
-            window.swaggerUi.load();
+<body>
+<div id="swagger-ui"></div>
 
-            function log() {
-                if ('console' in window) {
-                    console.log.apply(console, arguments);
-                }
-            }
-        });
-    </script>
-</head>
+<script src="swagger-ui/3.19.0/swagger-ui-bundle.js"> </script>
+<script src="swagger-ui/3.19.0/swagger-ui-standalone-preset.js"> </script>
+<script>
+    window.onload = function() {
 
-<body class="swagger-section">
-<div id='header'>
-    <div class="swagger-ui-wrap">
-        <a id="logo" href="http://swagger.io">swagger</a>
-        <form id='api_selector'>
-            <div class='input'><input placeholder="http://example.com/api" id="input_baseUrl" name="baseUrl" type="text"/></div>
-            <div class="input"><input placeholder="username" id="input_username" name="username" type="text" size="10"></div>
-            <div class="input"><input placeholder="password" id="input_password" name="password" type="password" size="10"></div>
-            <div class='input'><a id="explore" href="#">Explore</a></div>
-        </form>
-    </div>
-</div>
+      var urlPrefix = location.protocol + '//' + location.hostname + (location.port ? ':' + location.port : '');
+      // Build a system
+      const ui = SwaggerUIBundle({
+        url: urlPrefix + "/swagger.yaml",
+        dom_id: '#swagger-ui',
+        deepLinking: true,
+        presets: [
+          SwaggerUIBundle.presets.apis,
+          SwaggerUIStandalonePreset
+        ],
+        plugins: [
+          SwaggerUIBundle.plugins.DownloadUrl
+        ],
+        layout: "StandaloneLayout"
+      })
 
-<div id="message-bar" class="swagger-ui-wrap" data-sw-translate>&nbsp;</div>
-<div id="swagger-ui-container" class="swagger-ui-wrap"></div>
+      window.ui = ui
+    }
+  </script>
 </body>
-</html>
\ No newline at end of file
+</html>
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositeSecretTest.java
similarity index 70%
rename from ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java
rename to ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositeSecretTest.java
index 26a6953..78b0269 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositeSecretTest.java
@@ -1,11 +1,11 @@
 package org.apache.ambari.infra.conf.security;
 
-import org.junit.Test;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
 
 import java.util.Optional;
 
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -25,24 +25,24 @@ import static org.junit.Assert.assertThat;
  * specific language governing permissions and limitations
  * under the License.
  */
-public class CompositePasswordStoreTest {
+public class CompositeSecretTest {
   @Test
   public void testGetPasswordReturnNullIfNoPasswordStoresWereAdded() {
-    assertThat(new CompositePasswordStore().getPassword("any").isPresent(), is(false));
+    assertThat(new CompositeSecret().get().isPresent(), is(false));
   }
 
   @Test
   public void testGetPasswordReturnNullIfPasswordNotFoundInAnyStore() {
-    assertThat(new CompositePasswordStore((prop) -> Optional.empty(), (prop) -> Optional.empty()).getPassword("any").isPresent(), is(false));
+    assertThat(new CompositeSecret(Optional::empty, Optional::empty).get().isPresent(), is(false));
   }
 
   @Test
   public void testGetPasswordReturnPasswordFromFirstStoreIfExists() {
-    assertThat(new CompositePasswordStore((prop) -> Optional.of("Pass"), (prop) -> Optional.empty()).getPassword("any").get(), is("Pass"));
+    assertThat(new CompositeSecret(() -> Optional.of("Pass"), Optional::empty).get().get(), is("Pass"));
   }
 
   @Test
   public void testGetPasswordReturnPasswordFromSecondStoreIfNotExistsInFirst() {
-    assertThat(new CompositePasswordStore((prop) -> Optional.empty(), (prop) -> Optional.of("Pass")).getPassword("any").get(), is("Pass"));
+    assertThat(new CompositeSecret(Optional::empty, () -> Optional.of("Pass")).get().get(), is("Pass"));
   }
 }
\ No newline at end of file
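
The renamed test exercises a first-match-wins lookup over several Secret suppliers. A sketch of the shape the test implies, assuming Secret is a single-method interface returning Optional<String> (the actual classes in org.apache.ambari.infra.conf.security may differ in detail):

    import java.util.Optional;

    interface Secret {
      Optional<String> get();
    }

    class CompositeSecret implements Secret {
      private final Secret[] secrets;

      CompositeSecret(Secret... secrets) {
        this.secrets = secrets;
      }

      @Override
      public Optional<String> get() {
        for (Secret secret : secrets) {
          Optional<String> value = secret.get();
          if (value.isPresent()) {
            return value; // first store holding a value wins
          }
        }
        return Optional.empty();
      }
    }

With that shape, the `Optional::empty` and `() -> Optional.of("Pass")` arguments in the test are valid Secret lambdas.
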
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/env/TestAppConfig.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/env/TestAppConfig.java
new file mode 100644
index 0000000..6d07ecd
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/env/TestAppConfig.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.env;
+
+import javax.sql.DataSource;
+
+import org.springframework.batch.admin.service.JdbcSearchableJobExecutionDao;
+import org.springframework.batch.admin.service.JdbcSearchableJobInstanceDao;
+import org.springframework.batch.admin.service.SearchableJobExecutionDao;
+import org.springframework.batch.admin.service.SearchableJobInstanceDao;
+import org.springframework.batch.core.repository.ExecutionContextSerializer;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer;
+import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.io.Resource;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.datasource.DataSourceTransactionManager;
+import org.springframework.jdbc.datasource.DriverManagerDataSource;
+import org.springframework.jdbc.datasource.init.DataSourceInitializer;
+import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.support.TransactionTemplate;
+import org.sqlite.SQLiteConfig;
+
+@Configuration
+@ComponentScan(basePackages = {"org.apache.ambari.infra.env"})
+public class TestAppConfig {
+
+  @Value("classpath:org/springframework/batch/core/schema-drop-sqlite.sql")
+  private Resource dropRepositoryTables;
+
+  @Value("classpath:org/springframework/batch/core/schema-sqlite.sql")
+  private Resource dataRepositorySchema;
+
+  @Bean
+  public DataSource dataSource() {
+    DriverManagerDataSource dataSource = new DriverManagerDataSource();
+    dataSource.setDriverClassName("org.sqlite.JDBC");
+    dataSource.setUrl("jdbc:sqlite:test.db");
+    dataSource.setUsername("test");
+    dataSource.setPassword("test");
+    SQLiteConfig config = new SQLiteConfig();
+    config.enforceForeignKeys(true);
+    dataSource.setConnectionProperties(config.toProperties());
+    return dataSource;
+  }
+
+  @Bean
+  public DataSourceInitializer dataSourceInitializer() {
+    ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator();
+    databasePopulator.addScript(dropRepositoryTables);
+    databasePopulator.setIgnoreFailedDrops(true);
+    databasePopulator.addScript(dataRepositorySchema);
+    databasePopulator.setContinueOnError(true);
+
+    DataSourceInitializer initializer = new DataSourceInitializer();
+    initializer.setDataSource(dataSource());
+    initializer.setDatabasePopulator(databasePopulator);
+
+    return initializer;
+  }
+
+  @Bean
+  public JdbcTemplate jdbcTemplate(DataSource dataSource) {
+    return new JdbcTemplate(dataSource);
+  }
+
+  @Bean
+  public SearchableJobInstanceDao searchableJobInstanceDao(JdbcTemplate jdbcTemplate) {
+    JdbcSearchableJobInstanceDao dao = new JdbcSearchableJobInstanceDao();
+    dao.setJdbcTemplate(jdbcTemplate);
+    return dao;
+  }
+
+  @Bean
+  public SearchableJobExecutionDao searchableJobExecutionDao(JdbcTemplate jdbcTemplate, DataSource dataSource) {
+    JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao();
+    dao.setJdbcTemplate(jdbcTemplate);
+    dao.setDataSource(dataSource);
+    return dao;
+  }
+
+  @Bean
+  public ExecutionContextSerializer executionContextSerializer() {
+    return new Jackson2ExecutionContextStringSerializer();
+  }
+
+  @Bean
+  public PlatformTransactionManager transactionManager(DataSource dataSource) {
+    return new DataSourceTransactionManager(dataSource);
+  }
+
+  @Bean
+  public TransactionTemplate transactionTemplate(PlatformTransactionManager transactionManager) {
+    return new TransactionTemplate(transactionManager);
+  }
+
+  @Bean
+  public JobRepository jobRepository(ExecutionContextSerializer executionContextSerializer, PlatformTransactionManager transactionManager) throws Exception {
+    JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
+    factory.setDataSource(dataSource());
+    factory.setTransactionManager(transactionManager);
+    factory.setSerializer(executionContextSerializer);
+    factory.afterPropertiesSet();
+    return factory.getObject();
+  }
+
+}
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/InfraJobExecutionDAOIT.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/InfraJobExecutionDAOIT.java
new file mode 100644
index 0000000..7128cbb
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/InfraJobExecutionDAOIT.java
@@ -0,0 +1,99 @@
+package org.apache.ambari.infra.job;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.nullValue;
+
+import java.time.OffsetDateTime;
+import java.util.Date;
+
+import javax.inject.Inject;
+
+import org.apache.ambari.infra.env.TestAppConfig;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.batch.admin.service.SearchableJobExecutionDao;
+import org.springframework.batch.admin.service.SearchableJobInstanceDao;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersBuilder;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.transaction.support.TransactionTemplate;
+
+@RunWith(SpringJUnit4ClassRunner.class)
+@ContextConfiguration(classes = {TestAppConfig.class})
+public class InfraJobExecutionDAOIT {
+
+  private static int jobCounter = 0;
+
+  @Inject
+  private JdbcTemplate jdbcTemplate;
+  @Inject
+  private TransactionTemplate transactionTemplate;
+  @Inject
+  private JobRepository jobRepository;
+  @Inject
+  private SearchableJobExecutionDao searchableJobExecutionDao;
+  @Inject
+  private SearchableJobInstanceDao searchableJobInstanceDao;
+  private InfraJobExecutionDao infraJobExecutionDao;
+
+  @Before
+  public void setUp() {
+    infraJobExecutionDao = new InfraJobExecutionDao(jdbcTemplate, transactionTemplate);
+  }
+
+  @Test
+  public void testDeleteJobExecutions() throws Exception {
+    JobExecution yesterdayJob = newJobAt(OffsetDateTime.now().minusDays(1));
+    JobExecution todayJob = newJobAt(OffsetDateTime.now());
+
+    infraJobExecutionDao.deleteJobExecutions(OffsetDateTime.now().minusHours(1));
+
+    assertThat(searchableJobExecutionDao.getJobExecution(todayJob.getId()), is(not(nullValue())));
+    assertThat(searchableJobExecutionDao.getJobExecution(yesterdayJob.getId()), is(nullValue()));
+
+    assertThat(searchableJobInstanceDao.getJobInstance(todayJob.getJobId()), is(not(nullValue())));
+    assertThat(searchableJobInstanceDao.getJobInstance(yesterdayJob.getJobId()), is(nullValue()));
+  }
+
+  private JobExecution newJobAt(OffsetDateTime createdAt) throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException {
+    JobParameters jobParameters = new JobParametersBuilder().addString("test param", "test value").toJobParameters();
+    JobExecution jobExecution = jobRepository.createJobExecution("test job" + jobCounter++, jobParameters);
+    jobExecution.setCreateTime(Date.from(createdAt.toInstant()));
+    jobRepository.update(jobExecution);
+
+    StepExecution stepExecution = new StepExecution("step1", jobExecution);
+    jobRepository.add(stepExecution);
+
+    return jobExecution;
+  }
+}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java
deleted file mode 100644
index 3b7caab..0000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package org.apache.ambari.infra.job;
-
-import org.apache.ambari.infra.job.archive.DocumentArchivingProperties;
-import org.apache.ambari.infra.job.archive.SolrProperties;
-import org.junit.Test;
-
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class JobPropertiesTest {
-  @Test
-  public void testDeepCopy() throws Exception {
-    DocumentArchivingProperties documentArchivingProperties = new DocumentArchivingProperties();
-    documentArchivingProperties.setLocalDestinationDirectory("/tmp");
-    documentArchivingProperties.setFileNameSuffixColumn(".json");
-    documentArchivingProperties.setReadBlockSize(10);
-    documentArchivingProperties.setWriteBlockSize(20);
-    SolrProperties solr = new SolrProperties();
-    solr.setZooKeeperConnectionString("localhost:2181");
-    solr.setFilterQueryText("id:1167");
-    solr.setQueryText("name:'Joe'");
-    solr.setCollection("Users");
-    solr.setSortColumn(new String[] {"name"});
-    documentArchivingProperties.setSolr(solr);
-
-    DocumentArchivingProperties parsed = documentArchivingProperties.deepCopy();
-
-    assertThat(parsed.getLocalDestinationDirectory(), is(documentArchivingProperties.getLocalDestinationDirectory()));
-    assertThat(parsed.getFileNameSuffixColumn(), is(documentArchivingProperties.getFileNameSuffixColumn()));
-    assertThat(parsed.getReadBlockSize(), is(documentArchivingProperties.getReadBlockSize()));
-    assertThat(parsed.getWriteBlockSize(), is(documentArchivingProperties.getWriteBlockSize()));
-    assertThat(parsed.getSolr().getZooKeeperConnectionString(), is(documentArchivingProperties.getSolr().getZooKeeperConnectionString()));
-    assertThat(parsed.getSolr().getQueryText(), is(solr.getQueryText()));
-    assertThat(parsed.getSolr().getFilterQueryText(), is(solr.getFilterQueryText()));
-    assertThat(parsed.getSolr().getCollection(), is(solr.getCollection()));
-    assertThat(parsed.getSolr().getSortColumn(), is(solr.getSortColumn()));
-  }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java
index ba1150f..c10638e 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java
@@ -1,28 +1,3 @@
-package org.apache.ambari.infra.job;
-
-import org.apache.ambari.infra.manager.Jobs;
-import org.easymock.EasyMockRunner;
-import org.easymock.EasyMockSupport;
-import org.easymock.Mock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.support.CronTrigger;
-
-import javax.batch.operations.NoSuchJobException;
-import java.util.Optional;
-import java.util.concurrent.ScheduledFuture;
-
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.isA;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -41,6 +16,32 @@ import static org.easymock.EasyMock.isA;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.ambari.infra.job;
+
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.isA;
+
+import java.util.Optional;
+import java.util.concurrent.ScheduledFuture;
+
+import javax.batch.operations.NoSuchJobException;
+
+import org.apache.ambari.infra.manager.Jobs;
+import org.easymock.EasyMockRunner;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.scheduling.TaskScheduler;
+import org.springframework.scheduling.support.CronTrigger;
+
 @RunWith(EasyMockRunner.class)
 public class JobSchedulerTest extends EasyMockSupport {
 
@@ -53,12 +54,12 @@ public class JobSchedulerTest extends EasyMockSupport {
   private JobScheduler jobScheduler;
 
   @Before
-  public void setUp() throws Exception {
+  public void setUp() {
     jobScheduler = new JobScheduler(taskScheduler, jobs);
   }
 
   @After
-  public void tearDown() throws Exception {
+  public void tearDown() {
     verifyAll();
   }
 
@@ -111,4 +112,19 @@ public class JobSchedulerTest extends EasyMockSupport {
 
     jobScheduler.schedule(jobName, schedulingProperties);
   }
+
+  @Test
+  public void testScheduleWhenPreviousExecutionIsUnknownJobIsAbandonedAndScheduled() throws Exception {
+    String jobName = "job0";
+    SchedulingProperties schedulingProperties = new SchedulingProperties();
+    schedulingProperties.setCron("* * * * * ?");
+    JobExecution jobExecution = new JobExecution(1L, new JobParameters());
+    jobExecution.setExitStatus(ExitStatus.UNKNOWN);
+    expect(jobs.lastRun(jobName)).andReturn(Optional.of(jobExecution));
+    jobs.stopAndAbandon(1L); expectLastCall();
+    expect(taskScheduler.schedule(isA(Runnable.class), eq(new CronTrigger(schedulingProperties.getCron())))).andReturn(scheduledFuture);
+    replayAll();
+
+    jobScheduler.schedule(jobName, schedulingProperties);
+  }
 }
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
index b31110c..fe2b037 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
@@ -19,6 +19,15 @@
 
 package org.apache.ambari.infra.job.archive;
 
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.HashMap;
+
 import org.apache.ambari.infra.job.JobContextRepository;
 import org.easymock.EasyMockRunner;
 import org.easymock.EasyMockSupport;
@@ -27,8 +36,8 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.springframework.batch.core.BatchStatus;
 import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.StepContribution;
 import org.springframework.batch.core.StepExecution;
 import org.springframework.batch.core.scope.context.ChunkContext;
 import org.springframework.batch.core.scope.context.StepContext;
@@ -36,26 +45,19 @@ import org.springframework.batch.item.ExecutionContext;
 import org.springframework.batch.item.ItemStreamReader;
 import org.springframework.batch.repeat.RepeatStatus;
 
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.util.HashMap;
-
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.core.Is.is;
-
 @RunWith(EasyMockRunner.class)
 public class DocumentExporterTest extends EasyMockSupport {
 
   private static final long JOB_EXECUTION_ID = 1L;
   private static final long STEP_EXECUTION_ID = 1L;
-  private static final Document DOCUMENT_2 = new Document(new HashMap<String, String>() {{
+  private static final Document DOCUMENT_2 = new Document(new HashMap<String, Object>() {{
     put("id", "2");
   }});
-  private static final Document DOCUMENT_3 = new Document(new HashMap<String, String>() {{
+  private static final Document DOCUMENT_3 = new Document(new HashMap<String, Object>() {{
     put("id", "3");
   }});
+  private static final StepContribution ANY_STEP_CONTRIBUTION = new StepContribution(new StepExecution("any", new JobExecution(1L)));
+
   private DocumentExporter documentExporter;
   @Mock
   private ItemStreamReader<Document> reader;
@@ -70,13 +72,12 @@ public class DocumentExporterTest extends EasyMockSupport {
   @Mock
   private JobContextRepository jobContextRepository;
 
-//  private ExecutionContext executionContext;
   private ChunkContext chunkContext;
-  private static final Document DOCUMENT = new Document(new HashMap<String, String>() {{ put("id", "1"); }});
+  private static final Document DOCUMENT = new Document(new HashMap<String, Object>() {{ put("id", "1"); }});
 
   @Before
-  public void setUp() throws Exception {
-    chunkContext = chunkContext(BatchStatus.STARTED);
+  public void setUp() {
+    chunkContext = chunkContext(false);
     documentExporter = documentExporter(2);
   }
 
@@ -84,15 +85,16 @@ public class DocumentExporterTest extends EasyMockSupport {
     return new DocumentExporter(reader, documentDestination, writeBlockSize, jobContextRepository);
   }
 
-  private ChunkContext chunkContext(BatchStatus batchStatus) {
+  private ChunkContext chunkContext(boolean terminate) {
     StepExecution stepExecution = new StepExecution("exportDoc", new JobExecution(JOB_EXECUTION_ID));
     stepExecution.setId(STEP_EXECUTION_ID);
-    stepExecution.getJobExecution().setStatus(batchStatus);
+    if (terminate)
+      stepExecution.setTerminateOnly();
     return new ChunkContext(new StepContext(stepExecution));
   }
 
   @After
-  public void tearDown() throws Exception {
+  public void tearDown() {
     verifyAll();
   }
 
@@ -103,7 +105,7 @@ public class DocumentExporterTest extends EasyMockSupport {
     reader.close(); expectLastCall();
     replayAll();
 
-    documentExporter.execute(null, chunkContext);
+    documentExporter.execute(ANY_STEP_CONTRIBUTION, chunkContext);
   }
 
   private ExecutionContext executionContext(ChunkContext chunkContext) {
@@ -121,7 +123,7 @@ public class DocumentExporterTest extends EasyMockSupport {
     documentItemWriter.close(); expectLastCall();
     replayAll();
 
-    assertThat(documentExporter.execute(null, chunkContext), is(RepeatStatus.FINISHED));
+    assertThat(documentExporter.execute(ANY_STEP_CONTRIBUTION, chunkContext), is(RepeatStatus.FINISHED));
   }
 
   @Test
@@ -144,7 +146,7 @@ public class DocumentExporterTest extends EasyMockSupport {
     documentItemWriter2.close(); expectLastCall();
     replayAll();
 
-    assertThat(documentExporter.execute(null, chunkContext), is(RepeatStatus.FINISHED));
+    assertThat(documentExporter.execute(ANY_STEP_CONTRIBUTION, chunkContext), is(RepeatStatus.FINISHED));
   }
 
   @Test(expected = IOException.class)
@@ -158,7 +160,7 @@ public class DocumentExporterTest extends EasyMockSupport {
     reader.close(); expectLastCall();
     replayAll();
 
-    documentExporter.execute(null, chunkContext);
+    documentExporter.execute(ANY_STEP_CONTRIBUTION, chunkContext);
   }
 
   @Test(expected = UncheckedIOException.class)
@@ -171,12 +173,12 @@ public class DocumentExporterTest extends EasyMockSupport {
     reader.close(); expectLastCall();
     replayAll();
 
-    documentExporter.execute(null, chunkContext);
+    documentExporter.execute(ANY_STEP_CONTRIBUTION, chunkContext);
   }
 
   @Test
   public void testStopAndRestartExportsAllDocuments() throws Exception {
-    ChunkContext stoppingChunkContext = chunkContext(BatchStatus.STOPPING);
+    ChunkContext stoppingChunkContext = chunkContext(true);
     DocumentExporter documentExporter = documentExporter(1);
 
     reader.open(executionContext(chunkContext)); expectLastCall();
@@ -207,9 +209,9 @@ public class DocumentExporterTest extends EasyMockSupport {
     reader.close(); expectLastCall();
     replayAll();
 
-    RepeatStatus repeatStatus = documentExporter.execute(null, this.chunkContext);
+    RepeatStatus repeatStatus = documentExporter.execute(ANY_STEP_CONTRIBUTION, this.chunkContext);
     assertThat(repeatStatus, is(RepeatStatus.CONTINUABLE));
-    repeatStatus = documentExporter.execute(null, this.chunkContext);
+    repeatStatus = documentExporter.execute(ANY_STEP_CONTRIBUTION, this.chunkContext);
     assertThat(repeatStatus, is(RepeatStatus.FINISHED));
   }
 }
\ No newline at end of file
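
The exporter test above switches from inspecting BatchStatus to calling StepExecution.setTerminateOnly(), the flag Spring Batch raises when a stop is requested. A minimal tasklet sketch of honoring that flag (hypothetical; the real DocumentExporter also manages readers and writers):

    import org.springframework.batch.core.StepContribution;
    import org.springframework.batch.core.StepExecution;
    import org.springframework.batch.core.scope.context.ChunkContext;
    import org.springframework.batch.core.step.tasklet.Tasklet;
    import org.springframework.batch.repeat.RepeatStatus;

    public class StopAwareTasklet implements Tasklet {
      @Override
      public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) {
        StepExecution stepExecution = chunkContext.getStepContext().getStepExecution();
        // JobOperator.stop() flips this flag on the running step execution
        if (stepExecution.isTerminateOnly()) {
          return RepeatStatus.FINISHED;
        }
        // ... process one chunk of work here ...
        return RepeatStatus.CONTINUABLE;
      }
    }
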
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
index 0776c3c..9d2b21a 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
@@ -19,6 +19,14 @@
 
 package org.apache.ambari.infra.job.archive;
 
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNull.nullValue;
+import static org.junit.Assert.assertThat;
+
+import java.util.HashMap;
+
 import org.apache.ambari.infra.job.CloseableIterator;
 import org.apache.ambari.infra.job.ObjectSource;
 import org.easymock.EasyMockRunner;
@@ -30,19 +38,11 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.springframework.batch.item.ExecutionContext;
 
-import java.util.HashMap;
-
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.hamcrest.core.Is.is;
-import static org.hamcrest.core.IsNull.nullValue;
-import static org.junit.Assert.assertThat;
-
 @RunWith(EasyMockRunner.class)
 public class DocumentItemReaderTest extends EasyMockSupport {
-  private static final Document DOCUMENT = new Document(new HashMap<String, String>() {{ put("id", "1"); }});
-  private static final Document DOCUMENT_2 = new Document(new HashMap<String, String>() {{ put("id", "2"); }});
-  private static final Document DOCUMENT_3 = new Document(new HashMap<String, String>() {{ put("id", "3"); }});
+  private static final Document DOCUMENT = new Document(new HashMap<String, Object>() {{ put("id", "1"); }});
+  private static final Document DOCUMENT_2 = new Document(new HashMap<String, Object>() {{ put("id", "2"); }});
+  private static final Document DOCUMENT_3 = new Document(new HashMap<String, Object>() {{ put("id", "3"); }});
   private static final int READ_BLOCK_SIZE = 2;
 
   private DocumentItemReader documentItemReader;
@@ -117,7 +117,7 @@ public class DocumentItemReaderTest extends EasyMockSupport {
 
   @Test
   public void testReadWhenCollectionContainsMoreElementsThanReadBlockSize() throws Exception {
-    Document document3 = new Document(new HashMap<String, String>() {{ put("id", "2"); }});
+    Document document3 = new Document(new HashMap<String, Object>() {{ put("id", "2"); }});
 
     expect(documentSource.open(null, 2)).andReturn(documentIterator);
     expect(documentSource.open(DOCUMENT_2, 2)).andReturn(documentIterator2);
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
index cca2c1a..1fa8434 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
@@ -1,11 +1,11 @@
 package org.apache.ambari.infra.job.archive;
 
-import org.junit.Test;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
 
 import java.util.HashMap;
 
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -42,17 +42,17 @@ public class FileNameSuffixFormatterTest {
 
   @Test(expected = IllegalArgumentException.class)
   public void testFormatWhenSpecifiedColumnContainsBlankValueThrowingException() throws Exception {
-    formatter.format(new Document(new HashMap<String, String>() {{ put("logtime", "  "); }}));
+    formatter.format(new Document(new HashMap<String, Object>() {{ put("logtime", "  "); }}));
   }
 
   @Test
   public void testFormatWhenNoDateFormatSpecifiedRawColumnValueReturned() throws Exception {
     FileNameSuffixFormatter formatter = new FileNameSuffixFormatter("logtime", null);
-    assertThat(formatter.format(new Document(new HashMap<String, String>() {{ put("logtime", "Monday"); }})), is("Monday"));
+    assertThat(formatter.format(new Document(new HashMap<String, Object>() {{ put("logtime", "Monday"); }})), is("Monday"));
   }
 
   @Test
   public void testFormatWhenDateFormatIsSpecifiedAFormattedValueReturned() throws Exception {
-    assertThat(formatter.format(new Document(new HashMap<String, String>() {{ put("logtime", "2017-12-15T10:12:33.453Z"); }})), is("2017-12-15T10-12-33-453Z"));
+    assertThat(formatter.format(new Document(new HashMap<String, Object>() {{ put("logtime", "2017-12-15T10:12:33.453Z"); }})), is("2017-12-15T10-12-33-453Z"));
   }
 }
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
index 85e79e1..af8b86d 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
@@ -19,7 +19,19 @@
 
 package org.apache.ambari.infra.job.archive;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
+import static org.easymock.EasyMock.cmp;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.LogicalOperator.EQUAL;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+
 import org.apache.commons.io.FileUtils;
 import org.easymock.EasyMockRunner;
 import org.easymock.EasyMockSupport;
@@ -29,25 +41,14 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-
-import static org.easymock.EasyMock.cmp;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.LogicalOperator.EQUAL;
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 @RunWith(EasyMockRunner.class)
 public class LocalDocumentItemWriterTest extends EasyMockSupport {
 
-  private static final Document DOCUMENT = new Document(new HashMap<String, String>() {{ put("id", "1"); }});
-  private static final Document DOCUMENT2 = new Document(new HashMap<String, String>() {{ put("id", "2"); }});
-  private static final Document DOCUMENT3 = new Document(new HashMap<String, String>() {{ put("id", "3"); }});
+  private static final Document DOCUMENT = new Document(new HashMap<String, Object>() {{ put("id", "1"); }});
+  private static final Document DOCUMENT2 = new Document(new HashMap<String, Object>() {{ put("id", "2"); }});
+  private static final Document DOCUMENT3 = new Document(new HashMap<String, Object>() {{ put("id", "3"); }});
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
   private LocalDocumentItemWriter localDocumentItemWriter;
@@ -80,9 +81,9 @@ public class LocalDocumentItemWriterTest extends EasyMockSupport {
 
     List<Document> documentList = readBack(outFile);
     assertThat(documentList.size(), is(3));
-    assertThat(documentList.get(0).get("id"), is(DOCUMENT.get("id")));
-    assertThat(documentList.get(1).get("id"), is(DOCUMENT2.get("id")));
-    assertThat(documentList.get(2).get("id"), is(DOCUMENT3.get("id")));
+    assertThat(documentList.get(0).getString("id"), is(DOCUMENT.getString("id")));
+    assertThat(documentList.get(1).getString("id"), is(DOCUMENT2.getString("id")));
+    assertThat(documentList.get(2).getString("id"), is(DOCUMENT3.getString("id")));
   }
 
   private Comparator<WriteCompletedEvent> writeCompletedEventEqualityComparator() {
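
The test hunks in this area move Document from Map<String, String> to Map<String, Object> and assert through a getString accessor. A sketch of the accessor shape those tests imply (the real class may differ):

    import java.util.Map;

    // Hypothetical shape implied by the updated tests
    public class Document {
      private final Map<String, Object> fields;

      public Document(Map<String, Object> fields) {
        this.fields = fields;
      }

      public Object get(String key) {
        return fields.get(key);
      }

      // String view used by the assertions in LocalDocumentItemWriterTest
      public String getString(String key) {
        Object value = fields.get(key);
        return value == null ? null : value.toString();
      }
    }
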
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java
deleted file mode 100644
index e34a222..0000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import org.junit.Test;
-
-import java.io.StringReader;
-
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class S3AccessCsvTest {
-
-  private static final String VALID_ACCESS_FILE = "Access key ID,Secret access key\n" +
-          "someKey,someSecret\n";
-
-  private static final String ANY_CSV_FILE = "Column1,Column2\n" +
-          "Foo,Bar\n";
-
-  @Test
-  public void testGetPasswordReturnsNullIfInputIsEmpty() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(""));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false));
-  }
-
-  @Test
-  public void testGetPasswordReturnsAccessAndSecretKeyIfInputIsAValidS3AccessFile() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(VALID_ACCESS_FILE));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).get(), is("someKey"));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).get(), is("someSecret"));
-  }
-
-  @Test
-  public void testGetPasswordReturnsNullIfNotAValidS3AccessFileProvided() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(ANY_CSV_FILE));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false));
-  }
-
-  @Test
-  public void testGetPasswordReturnsNullIfAHeaderOnlyS3AccessFileProvided() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("Access key ID,Secret access key\n"));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false));
-  }
-
-  @Test
-  public void testGetPasswordReturnsNullIfOnlyOneValidColumnProvided() {
-    S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("Access key ID,Column\n"));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false));
-    assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false));
-  }
-}
\ No newline at end of file
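
S3AccessCsvTest goes away together with CSV-based credential parsing: this commit moves S3 access and secret keys behind the Secret abstraction (note the PasswordStore -> Secret and CompositePasswordStore -> CompositeSecret renames elsewhere in the patch). As a rough sketch of the replacement model, assuming Secret exposes an Optional<String> get() — the signature is inferred, only the type names come from this commit:

    import java.util.Optional;

    // Assumed interface shape; not shown in this hunk.
    interface Secret {
      Optional<String> get();
    }

    // An environment-variable-backed secret, e.g. for AWS_ACCESS_KEY_ID.
    class EnvironmentalSecret implements Secret {
      private final String variableName;

      EnvironmentalSecret(String variableName) {
        this.variableName = variableName;
      }

      @Override
      public Optional<String> get() {
        return Optional.ofNullable(System.getenv(variableName));
      }
    }
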
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
index 018c993..6334a15 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
@@ -1,12 +1,12 @@
 package org.apache.ambari.infra.job.archive;
 
-import org.junit.Test;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertThat;
 
 import java.util.HashMap;
 import java.util.Map;
 
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertThat;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -28,9 +28,9 @@ import static org.junit.Assert.assertThat;
  */
 public class SolrParametrizedStringTest {
 
-  private static final Map<String, String> PARAMETERS_1 = new HashMap<String, String>() {{ put("id", "1"); put("name", "User"); put("product", "Computer"); }};
-  private static final Map<String, String> PARAMETERS_START = new HashMap<String, String>() {{ put("price", "1000"); }};
-  private static final Map<String, String> PARAMETERS_END = new HashMap<String, String>() {{ put("price", "2000"); }};
+  private static final Map<String, Object> PARAMETERS_1 = new HashMap<String, Object>() {{ put("id", "1"); put("name", "User"); put("product", "Computer"); }};
+  private static final Map<String, Object> PARAMETERS_START = new HashMap<String, Object>() {{ put("price", "1000"); }};
+  private static final Map<String, Object> PARAMETERS_END = new HashMap<String, Object>() {{ put("price", "2000"); }};
 
   @Test
   public void testToStringEmptyStringResultsEmptyString() {
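
Only the parameter map type changes here (Map<String, String> -> Map<String, Object>), so SolrParametrizedString must now stringify values when it resolves ${...} placeholders. An illustrative stand-in for that resolution step — this is not the class under test:

    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    final class PlaceholderResolver {
      private static final Pattern PLACEHOLDER = Pattern.compile("\\$\\{(\\w+)}");

      // Replaces ${name} with parameters.get("name").toString();
      // unknown placeholders are left untouched.
      static String resolve(String template, Map<String, Object> parameters) {
        Matcher matcher = PLACEHOLDER.matcher(template);
        StringBuffer result = new StringBuffer();
        while (matcher.find()) {
          Object value = parameters.get(matcher.group(1));
          matcher.appendReplacement(result, Matcher.quoteReplacement(
              value == null ? matcher.group() : value.toString()));
        }
        matcher.appendTail(result);
        return result.toString();
      }
    }
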
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
index be8a226..d4b4313 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
@@ -1,12 +1,13 @@
 package org.apache.ambari.infra.job.archive;
 
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.junit.Assert.assertThat;
+
 import org.junit.Test;
 import org.springframework.batch.core.JobParameters;
 import org.springframework.batch.core.JobParametersBuilder;
 
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -27,7 +28,7 @@ import static org.junit.Assert.assertThat;
  */
 public class SolrPropertiesTest {
   @Test
-  public void testApplySortColumns() throws Exception {
+  public void testMergeSortColumns() {
     JobParameters jobParameters = new JobParametersBuilder()
             .addString("sortColumn[0]", "logtime")
             .addString("sortColumn[1]", "id")
@@ -35,20 +36,31 @@ public class  SolrPropertiesTest {
 
     SolrProperties solrProperties = new SolrProperties();
     solrProperties.setSortColumn(new String[] {"testColumn"});
-    solrProperties.apply(jobParameters);
-    assertThat(solrProperties.getSortColumn().length, is(2));
-    assertThat(solrProperties.getSortColumn()[0], is("logtime"));
-    assertThat(solrProperties.getSortColumn()[1], is("id"));
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
+    assertThat(solrParameters.getSortColumn().length, is(2));
+    assertThat(solrParameters.getSortColumn()[0], is("logtime"));
+    assertThat(solrParameters.getSortColumn()[1], is("id"));
+  }
+
+  @Test
+  public void testMergeWhenNoSortIsDefined() {
+    JobParameters jobParameters = new JobParametersBuilder()
+            .toJobParameters();
+
+    SolrProperties solrProperties = new SolrProperties();
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
+    assertThat(solrParameters.getSortColumn(), is(nullValue()));
   }
 
   @Test
-  public void testApplyWhenNoSortIsDefined() throws Exception {
+  public void testMergeWhenPropertiesAreDefinedButJobParamsAreNot() {
     JobParameters jobParameters = new JobParametersBuilder()
             .toJobParameters();
 
     SolrProperties solrProperties = new SolrProperties();
     solrProperties.setSortColumn(new String[] {"testColumn"});
-    solrProperties.apply(jobParameters);
-    assertThat(solrProperties.getSortColumn().length, is(1));
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
+    assertThat(solrParameters.getSortColumn().length, is(1));
+    assertThat(solrParameters.getSortColumn()[0], is("testColumn"));
   }
 }
\ No newline at end of file
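
The rename from apply(...) to merge(...) is behavioral: instead of mutating the shared SolrProperties bean, merge returns a fresh instance in which job parameters override the configured defaults. A hedged sketch of that pattern for the sortColumn field exercised above — the committed merge covers more properties than this:

    import java.util.ArrayList;
    import java.util.List;

    import org.springframework.batch.core.JobParameters;

    // Sketch only: job parameters win, otherwise configured defaults survive.
    public SolrProperties merge(JobParameters jobParameters) {
      SolrProperties merged = new SolrProperties();
      String[] fromParameters = readSortColumns(jobParameters);
      merged.setSortColumn(fromParameters != null ? fromParameters : this.getSortColumn());
      return merged;
    }

    // Collects sortColumn[0], sortColumn[1], ... until the first gap.
    private static String[] readSortColumns(JobParameters jobParameters) {
      List<String> columns = new ArrayList<>();
      for (int i = 0; jobParameters.getString("sortColumn[" + i + "]") != null; i++)
        columns.add(jobParameters.getString("sortColumn[" + i + "]"));
      return columns.isEmpty() ? null : columns.toArray(new String[0]);
    }
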
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
index ee08279..4f45189 100644
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
@@ -18,17 +18,22 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.solr.client.solrj.SolrQuery;
-import org.junit.Test;
-
-import java.util.HashMap;
-
+import static org.apache.ambari.infra.job.archive.SolrQueryBuilder.computeEnd;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.junit.Assert.assertThat;
 
+import java.time.Duration;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.util.HashMap;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.hamcrest.core.Is;
+import org.junit.Test;
+
 public class SolrQueryBuilderTest {
-  private static final Document DOCUMENT = new Document(new HashMap<String, String>() {{
+  private static final Document DOCUMENT = new Document(new HashMap<String, Object>() {{
     put("logtime", "2017-10-02'T'10:00:11.634Z");
     put("id", "1");
   }});
@@ -103,4 +108,27 @@ public class SolrQueryBuilderTest {
     SolrQuery solrQuery = new SolrQueryBuilder().setQueryText("id:[${start} TO ${end}]").build();
     assertThat(solrQuery.getQuery(), is("id:[* TO *]"));
   }
+
+  @Test
+  public void testComputeEndReturnsNullIfNoEndAndNoTTLWasGiven() {
+    assertThat(computeEnd(null, OffsetDateTime.now(), null), Is.is(nullValue()));
+  }
+
+  @Test
+  public void testComputeEndReturnsEndIfOnlyEndWasGiven() {
+    String end = "2018-10-09T10:11:12.000Z";
+    assertThat(computeEnd(end, OffsetDateTime.now(), null), Is.is(end));
+  }
+
+  @Test
+  public void testComputeEndReturnsNowMinusTtlIfOnlyTtlWasGiven() {
+    OffsetDateTime now = OffsetDateTime.of(2018, 10, 9, 10, 11, 12, 0, ZoneOffset.UTC);
+    assertThat(computeEnd(null, now, Duration.ofDays(5)), Is.is("2018-10-04T10:11:12.000Z"));
+  }
+
+  @Test
+  public void testComputeEndReturnsEndIfBothWereGiven() {
+    String end = "2018-10-09T10:11:12.000Z";
+    assertThat(computeEnd(end, OffsetDateTime.now(), Duration.ofDays(5)), Is.is(end));
+  }
 }
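
The four new computeEnd tests pin down its precedence: an explicit end string always wins, otherwise the TTL is subtracted from now and formatted with millisecond precision, and with neither input the result is null. An implementation inferred from those assertions — the committed body is not shown in this patch:

    import java.time.Duration;
    import java.time.OffsetDateTime;
    import java.time.format.DateTimeFormatter;

    // Inferred from the tests above; 2018-10-09T10:11:12Z minus 5 days
    // formats as "2018-10-04T10:11:12.000Z".
    public static String computeEnd(String end, OffsetDateTime now, Duration ttl) {
      if (end != null)
        return end;
      if (ttl != null)
        return now.minus(ttl).format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX"));
      return null;
    }
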
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/json/FsPermissionToStringConverterTest.java
similarity index 58%
rename from ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java
rename to ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/json/FsPermissionToStringConverterTest.java
index e840d3b..1c52016 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/json/FsPermissionToStringConverterTest.java
@@ -16,25 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.infra.job.archive;
+package org.apache.ambari.infra.json;
 
-public enum S3AccessKeyNames {
-  AccessKeyId("AWS_ACCESS_KEY_ID", "Access key ID"),
-  SecretAccessKey("AWS_SECRET_ACCESS_KEY", "Secret access key");
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
 
-  private final String envVariableName;
-  private final String csvName;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.junit.Test;
 
-  S3AccessKeyNames(String envVariableName, String csvName) {
-    this.envVariableName = envVariableName;
-    this.csvName = csvName;
+public class FsPermissionToStringConverterTest {
+  @Test
+  public void testConvertWhenInputIsNotNull() {
+    assertThat(new FsPermissionToStringConverter().convert(new FsPermission("640")), is("640"));
   }
 
-  public String getEnvVariableName() {
-    return envVariableName;
+  @Test
+  public void testConvertWhenInputIsNull() {
+    assertThat(new FsPermissionToStringConverter().convert(null), is(nullValue()));
   }
-
-  public String getCsvName() {
-    return csvName;
-  }
-}
+}
\ No newline at end of file
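
FsPermissionToStringConverter is a small null-safe adapter: it renders a Hadoop FsPermission as its octal string form ("640" round-trips) and passes null through. A sketch consistent with the two tests — the committed class presumably also implements a converter interface for the JSON layer, which is omitted here:

    import org.apache.hadoop.fs.permission.FsPermission;

    public class FsPermissionToStringConverter {
      // new FsPermission("640").toShort() == 0640, so this prints "640".
      public String convert(FsPermission permission) {
        return permission == null ? null : Integer.toOctalString(permission.toShort());
      }
    }
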
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/model/ISO8601DateFormatterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/model/ISO8601DateFormatterTest.java
new file mode 100644
index 0000000..b2bb0e8
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/model/ISO8601DateFormatterTest.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.model;
+
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
+
+import java.text.ParseException;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.util.Date;
+
+import org.junit.Test;
+
+public class ISO8601DateFormatterTest {
+
+  @Test
+  public void testFormat() {
+    OffsetDateTime offsetDateTime = OffsetDateTime.of(
+            2018, 11, 30,
+            2, 30, 11, 0,
+            ZoneOffset.ofHoursMinutes(1, 30));
+    String text = new ISO8601DateFormatter().format(Date.from(offsetDateTime.toInstant()));
+    assertThat(text, is("2018-11-30T01:00:11Z"));
+  }
+
+  @Test
+  public void testParse() throws ParseException {
+    Date now = new Date();
+    ISO8601DateFormatter iso8601DateFormatter = new ISO8601DateFormatter();
+    Date parsed = iso8601DateFormatter.parse(iso8601DateFormatter.format(now));
+    assertThat(parsed, is(now));
+  }
+}
\ No newline at end of file
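
Both tests constrain ISO8601DateFormatter the same way: format normalizes any offset to UTC (02:30:11+01:30 becomes 01:00:11Z), omits the fractional part when it is zero, and parse inverts format exactly so a fresh Date with millisecond precision round-trips. DateTimeFormatter.ISO_INSTANT has precisely those properties, so a plausible core looks like this — the real class likely extends java.text.DateFormat, which is skipped in this sketch:

    import java.time.Instant;
    import java.time.format.DateTimeFormatter;
    import java.util.Date;

    public class ISO8601DateFormatter {
      // ISO_INSTANT prints "...:11Z" for whole seconds and "...:11.123Z"
      // when milliseconds are present, matching both tests.
      public String format(Date date) {
        return DateTimeFormatter.ISO_INSTANT.format(date.toInstant());
      }

      public Date parse(String text) {
        return Date.from(Instant.parse(text));
      }
    }
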
diff --git a/ambari-infra/pom.xml b/ambari-infra/pom.xml
index ddaa5fb..602cdc6 100644
--- a/ambari-infra/pom.xml
+++ b/ambari-infra/pom.xml
@@ -35,6 +35,7 @@
     <surefire.argLine>-Xmx1024m -Xms512m</surefire.argLine>
     <zookeeper.version>3.4.6.2.3.0.0-2557</zookeeper.version>
     <skipSurefireTests>false</skipSurefireTests>
+    <log4j2.version>2.11.0</log4j2.version>
   </properties>
 
   <licenses>
@@ -360,6 +361,16 @@
           </exclusion>
         </exclusions>
       </dependency>
+      <dependency>
+        <groupId>org.hamcrest</groupId>
+        <artifactId>hamcrest-all</artifactId>
+        <version>1.3</version>
+      </dependency>
+      <dependency>
+        <groupId>com.google.code.gson</groupId>
+        <artifactId>gson</artifactId>
+        <version>2.8.5</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>