Posted to commits@ambari.apache.org by kr...@apache.org on 2018/11/15 19:04:23 UTC

[ambari-infra] branch master updated: AMBARI-24895 - Infra Manager: code clean up (#15)

This is an automated email from the ASF dual-hosted git repository.

krisztiankasa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ambari-infra.git


The following commit(s) were added to refs/heads/master by this push:
     new da6bdb9  AMBARI-24895 - Infra Manager: code clean up (#15)
da6bdb9 is described below

commit da6bdb96d920432a47a25b698ce8e7cdec7f3c96
Author: kasakrisz <33...@users.noreply.github.com>
AuthorDate: Thu Nov 15 20:04:18 2018 +0100

    AMBARI-24895 - Infra Manager: code clean up (#15)
---
 ambari-infra-manager-it/pom.xml                    |  10 +
 .../java/org/apache/ambari/infra/InfraClient.java  |   8 +-
 .../test/java/org/apache/ambari/infra/Solr.java    |   8 +-
 .../java/org/apache/ambari/infra/TestUtil.java     |  12 +-
 .../ambari/infra/solr/metrics/MetricsIT.java       |  16 +-
 .../infra/solr/metrics/MockMetricsServer.java      |  10 +-
 .../ambari/infra/steps/AbstractInfraSteps.java     |  26 +--
 .../apache/ambari/infra/steps/ExportJobsSteps.java |  10 +-
 .../infra/conf/security/HadoopCredentialStore.java |  12 +-
 .../infra/doc/InfraManagerApiDocStorage.java       |  10 +-
 .../infra/job/AbstractJobsConfiguration.java       |  10 +-
 .../org/apache/ambari/infra/job/JobScheduler.java  |   8 +-
 .../org/apache/ambari/infra/job/SolrDAOBase.java   |  10 +-
 .../infra/job/archive/AbstractFileAction.java      |  10 +-
 ...ingParameters.java => ArchivingProperties.java} |  44 +++-
 .../archive/DocumentArchivingConfiguration.java    |  25 +--
 .../job/archive/DocumentArchivingProperties.java   | 221 ---------------------
 .../job/archive/DocumentArchivingPropertyMap.java  |  10 +-
 .../ambari/infra/job/archive/DocumentExporter.java |   8 +-
 .../infra/job/archive/FileNameSuffixFormatter.java |   2 +-
 .../infra/job/archive/LocalDocumentItemWriter.java |  20 +-
 .../ambari/infra/job/archive/S3AccessCsv.java      |  16 +-
 .../ambari/infra/job/archive/S3Uploader.java       |   8 +-
 .../apache/ambari/infra/job/archive/SolrDAO.java   |  12 +-
 .../ambari/infra/job/archive/SolrParameters.java   |  93 ---------
 .../ambari/infra/job/archive/SolrProperties.java   |  37 +++-
 .../infra/job/cleanup/CleanUpConfiguration.java    |   4 +-
 .../infra/job/cleanup/CleanUpParameters.java       |  48 -----
 .../infra/job/cleanup/CleanUpProperties.java       |  22 +-
 .../ambari/infra/job/cleanup/TaskHistoryWiper.java |   6 +-
 ...tingParameters.java => DeletingProperties.java} |  17 +-
 .../deleting/DocumentDeletingConfiguration.java    |   4 +-
 .../job/deleting/DocumentDeletingProperties.java   |  78 --------
 .../job/deleting/DocumentDeletingPropertyMap.java  |  10 +-
 .../infra/job/deleting/DocumentWiperTasklet.java   |   8 +-
 .../ambari/infra/job/dummy/DummyItemProcessor.java |   8 +-
 .../ambari/infra/job/dummy/DummyItemWriter.java    |  27 +--
 .../ambari/infra/job/dummy/DummyJobListener.java   |  12 +-
 .../ambari/infra/job/dummy/DummyStepListener.java  |  10 +-
 .../apache/ambari/infra/manager/JobManager.java    |  10 +-
 .../ambari/infra/rest/JobExceptionMapper.java      |  28 +--
 .../org/apache/ambari/infra/rest/JobResource.java  |   8 +-
 .../infra/job/archive/SolrPropertiesTest.java      |   6 +-
 .../ambari/infra/solr/AmbariSolrCloudCLI.java      |   6 +-
 .../ambari/infra/solr/AmbariSolrCloudClient.java   |  40 ++--
 .../infra/solr/commands/AbstractRetryCommand.java  |   6 +-
 .../solr/commands/SecureSolrZNodeZkCommand.java    |  24 +--
 .../solr/commands/UpdateStateFileZkCommand.java    |  22 +-
 .../apache/ambari/infra/solr/util/ShardUtils.java  |  27 ++-
 .../infra/solr/metrics/reporters/MetricsUtils.java |  10 +-
 .../metrics/reporters/ScheduledAMSReporter.java    |   8 +-
 .../solr/metrics/reporters/SolrMetricsSink.java    |   4 +-
 52 files changed, 383 insertions(+), 726 deletions(-)
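
The recurring change throughout this patch is the move from the slf4j facade to the log4j2 API, with the logger constant renamed from LOG to logger. A minimal sketch of the new pattern (the class name is illustrative):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class ExampleComponent {
      // LogManager.getLogger replaces slf4j's LoggerFactory.getLogger;
      // the field is renamed from LOG to logger across the code base.
      private static final Logger logger = LogManager.getLogger(ExampleComponent.class);

      public void doWork(String jobName) {
        // The {} placeholder syntax is unchanged between the two APIs.
        logger.info("Starting job {}", jobName);
      }
    }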

diff --git a/ambari-infra-manager-it/pom.xml b/ambari-infra-manager-it/pom.xml
index 68d4352..f1f296b 100644
--- a/ambari-infra-manager-it/pom.xml
+++ b/ambari-infra-manager-it/pom.xml
@@ -157,6 +157,16 @@
       <artifactId>hadoop-hdfs-client</artifactId>
       <version>${hadoop.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <version>2.11.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <version>2.11.1</version>
+    </dependency>
   </dependencies>
 
   <build>
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
index ddc4f00..fdccbab 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
@@ -39,8 +39,8 @@ import org.apache.http.client.utils.URIBuilder;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
 import org.apache.http.impl.client.HttpClientBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.type.TypeReference;
@@ -49,7 +49,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 
 // TODO: use swagger
 public class InfraClient implements AutoCloseable {
-  private static final Logger LOG = LoggerFactory.getLogger(InfraClient.class);
+  private static final Logger logger = LogManager.getLogger(InfraClient.class);
 
   private final CloseableHttpClient httpClient;
   private final URI baseUrl;
@@ -77,7 +77,7 @@ public class InfraClient implements AutoCloseable {
     try (CloseableHttpResponse response = httpClient.execute(post)) {
       String responseBodyText = IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset());
       int statusCode = response.getStatusLine().getStatusCode();
-      LOG.info("Response code {} body {} ", statusCode, responseBodyText);
+      logger.info("Response code {} body {} ", statusCode, responseBodyText);
       if (!(200 <= statusCode && statusCode <= 299))
         throw new RuntimeException("Error while executing http request: " + responseBodyText);
       return new HttpResponse(statusCode, responseBodyText);
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
index 0dcc91a..7bc952a 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
@@ -31,17 +31,17 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
 import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrInputDocument;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class Solr {
-  private static final Logger LOG = LoggerFactory.getLogger(Solr.class);
+  private static final Logger logger = LogManager.getLogger(Solr.class);
   public static final String AUDIT_LOGS_COLLECTION = "audit_logs";
   public static final String HADOOP_LOGS_COLLECTION = "hadoop_logs";
   private static final int SOLR_PORT = 8983;
@@ -85,7 +85,7 @@ public class Solr {
   }
 
   public void createSolrCollection(String collectionName) {
-    LOG.info("Creating collection");
+    logger.info("Creating collection");
     runCommand(new String[]{"docker", "exec", "docker_solr_1", "solr", "create_collection", "-force", "-c", collectionName, "-d", Paths.get(configSetPath, "configsets", collectionName, "conf").toString(), "-n", collectionName + "_conf"});
   }
 
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
index f48e107..8f19ce9 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
@@ -25,11 +25,11 @@ import java.util.function.BooleanSupplier;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class TestUtil {
-  private static final Logger LOG = LoggerFactory.getLogger(TestUtil.class);
+  private static final Logger logger = LogManager.getLogger(TestUtil.class);
 
   public static void doWithin(int sec, String actionName, BooleanSupplier predicate) {
     doWithin(sec, actionName, () -> {
@@ -54,7 +54,7 @@ public class TestUtil {
         throw new AssertionError(String.format("Unable to perform action '%s' within %d seconds", actionName, sec), exception);
       }
       else {
-        LOG.info("Performing action '{}' failed. retrying...", actionName);
+        logger.info("Performing action '{}' failed. retrying...", actionName);
       }
       try {
         Thread.sleep(1000);
@@ -71,10 +71,10 @@ public class TestUtil {
 
   public static void runCommand(String[] command) {
     try {
-      LOG.info("Exec command: {}", StringUtils.join(command, " "));
+      logger.info("Exec command: {}", StringUtils.join(command, " "));
       Process process = Runtime.getRuntime().exec(command);
       String stdout = IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8);
-      LOG.info("Exec command result {}", stdout);
+      logger.info("Exec command result {}", stdout);
     } catch (Exception e) {
       throw new RuntimeException("Error during execute shell command: ", e);
     }
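
TestUtil.doWithin polls the supplied predicate roughly once per second, logging each failed attempt, and fails with an AssertionError once the timeout elapses. A usage sketch (pingManager is a hypothetical readiness probe; the real tests also pass method references such as httpClient::getJobs, as seen in AbstractInfraSteps below):

    // Declaring the BooleanSupplier explicitly selects the predicate overload.
    java.util.function.BooleanSupplier managerIsUp = () -> pingManager();
    // Retries about once per second, up to 30 seconds, then throws an
    // AssertionError carrying the action name if the predicate never passed.
    TestUtil.doWithin(30, "Start Ambari Infra Manager", managerIsUp);
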
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
index 3016d67..c400aee 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
@@ -30,14 +30,14 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.ambari.infra.Solr;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class MetricsIT {
-  private static final Logger LOG = LoggerFactory.getLogger(MetricsIT.class);
+  private static final Logger logger = LogManager.getLogger(MetricsIT.class);
 
   private static MockMetricsServer metricsServer;
   private static String shellScriptLocation;
@@ -49,7 +49,7 @@ public class MetricsIT {
 
     // TODO: use the same containers as ambari-infra-manager-it
     shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr-docker-compose.sh";
-    LOG.info("Creating new docker containers for testing Ambari Infra Solr Metrics plugin ...");
+    logger.info("Creating new docker containers for testing Ambari Infra Solr Metrics plugin ...");
     runCommand(new String[]{shellScriptLocation, "start"});
 
     Solr solr = new Solr("/usr/lib/ambari-infra-solr/server/solr");
@@ -61,8 +61,8 @@ public class MetricsIT {
   }
 
   @AfterClass
-  public static void tearDown() throws Exception {
-    LOG.info("shutdown containers");
+  public static void tearDown() {
+    logger.info("shutdown containers");
     runCommand(new String[]{shellScriptLocation, "stop"});
   }
 
@@ -74,10 +74,10 @@ public class MetricsIT {
       Thread.sleep(1000);
       if (currentTimeMillis() - start > 30 * 1000)
         break;
-      LOG.info("Checking any metrics arrived...");
+      logger.info("Checking any metrics arrived...");
     }
 
-    metricsServer.getNotReceivedMetrics().forEach(metric -> LOG.info("Metric not received: {}", metric));
+    metricsServer.getNotReceivedMetrics().forEach(metric -> logger.info("Metric not received: {}", metric));
     assertThat(metricsServer.getNotReceivedMetrics().isEmpty(), is(true));
   }
 
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
index 9d2734f..7197446 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
@@ -27,8 +27,8 @@ import static spark.Spark.post;
 import java.util.Set;
 import java.util.concurrent.ConcurrentSkipListSet;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.google.gson.Gson;
 
@@ -37,7 +37,7 @@ import spark.Response;
 import spark.servlet.SparkApplication;
 
 public class MockMetricsServer implements SparkApplication {
-  private static final Logger LOG = LoggerFactory.getLogger(MockMetricsServer.class);
+  private static final Logger logger = LogManager.getLogger(MockMetricsServer.class);
   private static final String HOST_NAME = "metrics_collector";
 
   private Set<String> expectedMetrics;
@@ -51,14 +51,14 @@ public class MockMetricsServer implements SparkApplication {
   }
 
   private Object queryState(Request request, Response response) {
-    LOG.info("Sending hostname {}", HOST_NAME);
+    logger.info("Sending hostname {}", HOST_NAME);
     response.type("application/json");
     return new Gson().toJson(singletonList(HOST_NAME));
   }
 
   private Object logBody(Request req, Response resp) {
     String body = req.body();
-    LOG.info("Incoming metrics {}", body);
+    logger.info("Incoming metrics {}", body);
 
     expectedMetrics.removeIf(body::contains);
 
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
index 7a748bc..6ac3709 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
@@ -40,14 +40,14 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.common.SolrInputDocument;
 import org.jbehave.core.annotations.AfterStories;
 import org.jbehave.core.annotations.BeforeStories;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public abstract class AbstractInfraSteps {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractInfraSteps.class);
+  private static final Logger logger = LogManager.getLogger(AbstractInfraSteps.class);
 
   private static final int INFRA_MANAGER_PORT = 61890;
   private static final int FAKE_S3_PORT = 4569;
@@ -85,12 +85,12 @@ public abstract class AbstractInfraSteps {
 
     String localDataFolder = getLocalDataFolder();
     if (new File(localDataFolder).exists()) {
-      LOG.info("Clean local data folder {}", localDataFolder);
+      logger.info("Clean local data folder {}", localDataFolder);
       FileUtils.cleanDirectory(new File(localDataFolder));
     }
 
     shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh";
-    LOG.info("Create new docker container for testing Ambari Infra Manager ...");
+    logger.info("Create new docker container for testing Ambari Infra Manager ...");
     runCommand(new String[]{shellScriptLocation, "start"});
 
     dockerHost = getDockerHost();
@@ -101,7 +101,7 @@ public abstract class AbstractInfraSteps {
     solr.createSolrCollection(AUDIT_LOGS_COLLECTION);
     solr.createSolrCollection(HADOOP_LOGS_COLLECTION);
 
-    LOG.info("Initializing s3 client");
+    logger.info("Initializing s3 client");
     s3client = new S3Client(dockerHost, FAKE_S3_PORT, S3_BUCKET_NAME);
 
     checkInfraManagerReachable();
@@ -110,7 +110,7 @@ public abstract class AbstractInfraSteps {
   private void checkInfraManagerReachable() throws Exception {
     try (InfraClient httpClient = getInfraClient()) {
       doWithin(30, "Start Ambari Infra Manager", httpClient::getJobs);
-      LOG.info("Ambari Infra Manager is up and running");
+      logger.info("Ambari Infra Manager is up and running");
     }
   }
 
@@ -155,21 +155,21 @@ public abstract class AbstractInfraSteps {
   public void shutdownContainers() throws Exception {
     Thread.sleep(2000); // sync with s3 server
     List<String> objectKeys = getS3client().listObjectKeys();
-    LOG.info("Found {} files on s3.", objectKeys.size());
-    objectKeys.forEach(objectKey ->  LOG.info("Found file on s3 with key {}", objectKey));
+    logger.info("Found {} files on s3.", objectKeys.size());
+    objectKeys.forEach(objectKey ->  logger.info("Found file on s3 with key {}", objectKey));
 
-    LOG.info("Listing files on hdfs.");
+    logger.info("Listing files on hdfs.");
     try (FileSystem fileSystem = getHdfs()) {
       int count = 0;
       RemoteIterator<LocatedFileStatus> it = fileSystem.listFiles(new Path("/test_audit_logs"), true);
       while (it.hasNext()) {
-        LOG.info("Found file on hdfs with name {}", it.next().getPath().getName());
+        logger.info("Found file on hdfs with name {}", it.next().getPath().getName());
         ++count;
       }
-      LOG.info("{} files found on hfds", count);
+      logger.info("{} files found on hfds", count);
     }
 
-    LOG.info("shutdown containers");
+    logger.info("shutdown containers");
     runCommand(new String[]{shellScriptLocation, "stop"});
   }
 
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
index e2bbe9d..ff96c9a 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
@@ -49,18 +49,18 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.jbehave.core.annotations.Given;
 import org.jbehave.core.annotations.Then;
 import org.jbehave.core.annotations.When;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class ExportJobsSteps extends AbstractInfraSteps {
-  private static final Logger LOG = LoggerFactory.getLogger(ExportJobsSteps.class);
+  private static final Logger logger = LogManager.getLogger(ExportJobsSteps.class);
   private Set<String> documentIds = new HashSet<>();
 
   private Map<String, JobExecutionInfo> launchedJobs = new HashMap<>();
@@ -101,7 +101,7 @@ public class ExportJobsSteps extends AbstractInfraSteps {
     Thread.sleep(waitSec * 1000);
     try (InfraClient httpClient = getInfraClient()) {
       JobExecutionInfo jobExecutionInfo = httpClient.startJob(jobName, parameters);
-      LOG.info("Job {} started: {}", jobName, jobExecutionInfo);
+      logger.info("Job {} started: {}", jobName, jobExecutionInfo);
       launchedJobs.put(jobName, jobExecutionInfo);
     }
   }
@@ -208,7 +208,7 @@ public class ExportJobsSteps extends AbstractInfraSteps {
   @Then("Check $count files exists on local filesystem with filenames containing the text $text in the folder $path for job $jobName")
   public void checkNumberOfFilesOnLocalFilesystem(long count, String text, String path, String jobName) {
     File destinationDirectory = new File(getLocalDataFolder(), path.replace("${jobId}", launchedJobs.get(jobName).getJobId()));
-    LOG.info("Destination directory path: {}", destinationDirectory.getAbsolutePath());
+    logger.info("Destination directory path: {}", destinationDirectory.getAbsolutePath());
     doWithin(5, "Destination directory exists", destinationDirectory::exists);
 
     File[] files = requireNonNull(destinationDirectory.listFiles(),
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
index 9e1a17f..957a45d 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
@@ -18,16 +18,16 @@
  */
 package org.apache.ambari.infra.conf.security;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static org.apache.commons.lang.StringUtils.isBlank;
+import static org.apache.commons.lang3.ArrayUtils.isNotEmpty;
 
 import java.util.Optional;
 
-import static org.apache.commons.lang.StringUtils.isBlank;
-import static org.apache.commons.lang3.ArrayUtils.isNotEmpty;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class HadoopCredentialStore implements PasswordStore {
-  private static final Logger LOG = LoggerFactory.getLogger(InfraManagerSecurityConfig.class);
+  private static final Logger logger = LogManager.getLogger(InfraManagerSecurityConfig.class);
   public static final String CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY = "hadoop.security.credential.provider.path";
 
   private final String credentialStoreProviderPath;
@@ -48,7 +48,7 @@ public class HadoopCredentialStore implements PasswordStore {
       char[] passwordChars = config.getPassword(propertyName);
       return (isNotEmpty(passwordChars)) ? Optional.of(new String(passwordChars)) : Optional.empty();
     } catch (Exception e) {
-      LOG.warn("Could not load password {} from credential store.", propertyName);
+      logger.warn("Could not load password {} from credential store.", propertyName);
       return Optional.empty();
     }
   }
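
HadoopCredentialStore delegates to Hadoop's pluggable credential providers via Configuration.getPassword. A minimal sketch of that underlying call, assuming a JCEKS keystore (the provider path and alias are illustrative):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;

    public class CredentialLookupExample {
      public static char[] lookup(String alias) throws IOException {
        Configuration config = new Configuration();
        // Point Hadoop at a keystore-backed credential provider.
        config.set("hadoop.security.credential.provider.path",
            "jceks://file/etc/ambari-infra/conf/infra-manager.jceks");
        // Falls back to the plain configuration value if the alias is not
        // stored in any provider, or returns null if it is unknown entirely.
        return config.getPassword(alias);
      }
    }
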
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
index 5d525fa..5ababcc 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
@@ -25,8 +25,8 @@ import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 import javax.inject.Named;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.swagger.jaxrs.config.BeanConfig;
 import io.swagger.models.Swagger;
@@ -36,7 +36,7 @@ import io.swagger.util.Yaml;
 @Named
 public class InfraManagerApiDocStorage {
 
-  private static final Logger LOG = LoggerFactory.getLogger(InfraManagerApiDocStorage.class);
+  private static final Logger logger = LogManager.getLogger(InfraManagerApiDocStorage.class);
 
   private final Map<String, Object> swaggerMap = new ConcurrentHashMap<>();
 
@@ -48,7 +48,7 @@ public class InfraManagerApiDocStorage {
     Thread loadApiDocThread = new Thread("load_swagger_api_doc") {
       @Override
       public void run() {
-        LOG.info("Start thread to scan REST API doc from endpoints.");
+        logger.info("Start thread to scan REST API doc from endpoints.");
         Swagger swagger = beanConfig.getSwagger();
         swagger.addSecurityDefinition("basicAuth", new BasicAuthDefinition());
         beanConfig.configure(swagger);
@@ -66,7 +66,7 @@ public class InfraManagerApiDocStorage {
         } catch (Exception e) {
           e.printStackTrace();
         }
-        LOG.info("Scanning REST API endpoints and generating docs has been successful.");
+        logger.info("Scanning REST API endpoints and generating docs has been successful.");
       }
     };
     loadApiDocThread.setDaemon(true);
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
index 314e52e..8806cf0 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
@@ -22,8 +22,8 @@ import java.util.Map;
 
 import javax.annotation.PostConstruct;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
 import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
@@ -32,7 +32,7 @@ import org.springframework.boot.context.event.ApplicationReadyEvent;
 import org.springframework.context.event.EventListener;
 
 public abstract class AbstractJobsConfiguration<TProperties extends JobProperties<TParameters>, TParameters extends Validatable> {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractJobsConfiguration.class);
+  private static final Logger logger = LogManager.getLogger(AbstractJobsConfiguration.class);
 
   private final Map<String, TProperties> propertyMap;
   private final JobScheduler scheduler;
@@ -56,13 +56,13 @@ public abstract class AbstractJobsConfiguration<TProperties extends JobPropertie
             .forEach(jobName -> {
               try {
                 propertyMap.get(jobName).validate(jobName);
-                LOG.info("Registering job {}", jobName);
+                logger.info("Registering job {}", jobName);
                 JobBuilder jobBuilder = jobs.get(jobName).listener(new JobsPropertyMap<>(propertyMap));
                 Job job = buildJob(jobBuilder);
                 jobRegistryBeanPostProcessor.postProcessAfterInitialization(job, jobName);
               }
               catch (Exception e) {
-                LOG.warn("Unable to register job " + jobName, e);
+                logger.warn("Unable to register job " + jobName, e);
                 propertyMap.get(jobName).setEnabled(false);
               }
             });
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
index 7d77fbd..4edfae9 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
@@ -24,8 +24,8 @@ import javax.inject.Inject;
 import javax.inject.Named;
 
 import org.apache.ambari.infra.manager.Jobs;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobParametersBuilder;
@@ -40,7 +40,7 @@ import org.springframework.scheduling.support.CronTrigger;
 
 @Named
 public class JobScheduler {
-  private static final Logger LOG = LoggerFactory.getLogger(JobScheduler.class);
+  private static final Logger logger = LogManager.getLogger(JobScheduler.class);
 
   private final TaskScheduler scheduler;
   private final Jobs jobs;
@@ -59,7 +59,7 @@ public class JobScheduler {
     }
 
     scheduler.schedule(() -> launchJob(jobName), new CronTrigger(schedulingProperties.getCron()));
-    LOG.info("Job {} scheduled for running. Cron: {}", jobName, schedulingProperties.getCron());
+    logger.info("Job {} scheduled for running. Cron: {}", jobName, schedulingProperties.getCron());
   }
 
   private void restartIfFailed(JobExecution jobExecution) {
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
index e6f2c2f..5569cf0 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
@@ -25,14 +25,14 @@ import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.zookeeper.client.ConnectStringParser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public abstract class SolrDAOBase {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrDAOBase.class);
+  private static final Logger logger = LogManager.getLogger(SolrDAOBase.class);
 
   private final String zooKeeperConnectionString;
   private final String defaultCollection;
@@ -45,14 +45,14 @@ public abstract class SolrDAOBase {
   protected void delete(String deleteQueryText) {
     try (CloudSolrClient client = createClient()) {
       try {
-        LOG.info("Executing solr delete by query {}", deleteQueryText);
+        logger.info("Executing solr delete by query {}", deleteQueryText);
         client.deleteByQuery(deleteQueryText);
         client.commit();
       } catch (Exception e) {
         try {
           client.rollback();
         } catch (SolrServerException e1) {
-          LOG.warn("Unable to rollback after solr delete operation failure.", e1);
+          logger.warn("Unable to rollback after solr delete operation failure.", e1);
         }
         throw new RuntimeException(e);
       }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
index 3df18b6..3f89ec8 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
@@ -18,19 +18,19 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.File;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
 public abstract class AbstractFileAction implements FileAction {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractFileAction.class);
+  private static final Logger logger = LogManager.getLogger(AbstractFileAction.class);
 
   @Override
   public File perform(File inputFile) {
     File outputFile =  onPerform(inputFile);
     if (!inputFile.delete())
-      LOG.warn("File {} was not deleted. Exists: {}", inputFile.getAbsolutePath(), inputFile.exists());
+      logger.warn("File {} was not deleted. Exists: {}", inputFile.getAbsolutePath(), inputFile.exists());
     return outputFile;
   }
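
AbstractFileAction is a template method: subclasses implement onPerform to produce an output file, and the base class deletes the input afterwards, logging a warning if the delete fails. A hypothetical subclass as a sketch (the protected visibility of onPerform is an assumption):

    import java.io.File;
    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.nio.file.Files;

    // Hypothetical action: copies the input into an archive directory;
    // AbstractFileAction.perform() then deletes the original input file.
    public class CopyToArchiveAction extends AbstractFileAction {
      private final File archiveDir = new File("/var/lib/ambari-infra/archive"); // illustrative path

      @Override
      protected File onPerform(File inputFile) {
        File target = new File(archiveDir, inputFile.getName());
        try {
          Files.copy(inputFile.toPath(), target.toPath());
        } catch (IOException e) {
          throw new UncheckedIOException(e);
        }
        return target;
      }
    }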
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingParameters.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
similarity index 72%
rename from ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingParameters.java
rename to ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
index 5c783d6..3ad3926 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingParameters.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
@@ -22,29 +22,33 @@ import static java.util.Objects.requireNonNull;
 import static org.apache.ambari.infra.job.archive.ExportDestination.HDFS;
 import static org.apache.ambari.infra.job.archive.ExportDestination.LOCAL;
 import static org.apache.ambari.infra.job.archive.ExportDestination.S3;
+import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
+import static org.apache.ambari.infra.json.StringToFsPermissionConverter.toFsPermission;
 import static org.apache.commons.lang.StringUtils.isBlank;
 
 import java.time.Duration;
 import java.util.Optional;
 
+import org.apache.ambari.infra.job.JobProperties;
 import org.apache.ambari.infra.job.Validatable;
 import org.apache.ambari.infra.json.DurationToStringConverter;
 import org.apache.ambari.infra.json.FsPermissionToStringConverter;
 import org.apache.ambari.infra.json.StringToDurationConverter;
 import org.apache.ambari.infra.json.StringToFsPermissionConverter;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.springframework.batch.core.JobParameters;
 
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-public class ArchivingParameters implements Validatable {
+public class ArchivingProperties extends JobProperties<ArchivingProperties> implements Validatable {
   private int readBlockSize;
   private int writeBlockSize;
   private ExportDestination destination;
   private String localDestinationDirectory;
   private String fileNameSuffixColumn;
   private String fileNameSuffixDateFormat;
-  private SolrParameters solr;
+  private SolrProperties solr;
   private String s3AccessFile;
   private String s3KeyPrefix;
   private String s3BucketName;
@@ -110,11 +114,11 @@ public class ArchivingParameters implements Validatable {
     this.fileNameSuffixDateFormat = fileNameSuffixDateFormat;
   }
 
-  public SolrParameters getSolr() {
+  public SolrProperties getSolr() {
     return solr;
   }
 
-  public void setSolr(SolrParameters solr) {
+  public void setSolr(SolrProperties solr) {
     this.solr = solr;
   }
 
@@ -272,4 +276,36 @@ public class ArchivingParameters implements Validatable {
     requireNonNull(solr, "No solr query was specified for archiving job!");
     solr.validate();
   }
+
+  @Override
+  public ArchivingProperties merge(JobParameters jobParameters) {
+    ArchivingProperties archivingProperties = new ArchivingProperties();
+    archivingProperties.setReadBlockSize(getIntJobParameter(jobParameters, "readBlockSize", readBlockSize));
+    archivingProperties.setWriteBlockSize(getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize));
+    archivingProperties.setDestination(ExportDestination.valueOf(jobParameters.getString("destination", destination.name())));
+    archivingProperties.setLocalDestinationDirectory(jobParameters.getString("localDestinationDirectory", localDestinationDirectory));
+    archivingProperties.setFileNameSuffixColumn(jobParameters.getString("fileNameSuffixColumn", fileNameSuffixColumn));
+    archivingProperties.setFileNameSuffixDateFormat(jobParameters.getString("fileNameSuffixDateFormat", fileNameSuffixDateFormat));
+    archivingProperties.setS3AccessFile(jobParameters.getString("s3AccessFile", s3AccessFile));
+    archivingProperties.setS3BucketName(jobParameters.getString("s3BucketName", s3BucketName));
+    archivingProperties.setS3KeyPrefix(jobParameters.getString("s3KeyPrefix", s3KeyPrefix));
+    archivingProperties.setS3Endpoint(jobParameters.getString("s3Endpoint", s3Endpoint));
+    archivingProperties.setHdfsEndpoint(jobParameters.getString("hdfsEndpoint", hdfsEndpoint));
+    archivingProperties.setHdfsDestinationDirectory(jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory));
+    archivingProperties.setHdfsFilePermission(toFsPermission(jobParameters.getString("hdfsFilePermission", FsPermissionToStringConverter.toString(hdfsFilePermission))));
+    archivingProperties.setHdfsKerberosPrincipal(jobParameters.getString("hdfsKerberosPrincipal", hdfsKerberosPrincipal));
+    archivingProperties.setHdfsKerberosKeytabPath(jobParameters.getString("hdfsKerberosKeytabPath", hdfsKerberosKeytabPath));
+    archivingProperties.setSolr(solr.merge(jobParameters));
+    archivingProperties.setStart(jobParameters.getString("start"));
+    archivingProperties.setEnd(jobParameters.getString("end"));
+    archivingProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return archivingProperties;
+  }
+
+  private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) {
+    String valueText = jobParameters.getString(parameterName);
+    if (isBlank(valueText))
+      return defaultValue;
+    return Integer.parseInt(valueText);
+  }
 }
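
With this rename, ArchivingProperties doubles as its own merge result: the configured defaults and the launch-time overrides share one type. jobParameters.getString(name, defaultValue) returns the submitted value when present and the configured one otherwise. A hedged usage sketch (configuredProperties is assumed to come from the bound configuration; the Spring Batch types are org.springframework.batch.core.JobParameters and JobParametersBuilder):

    // Launch-time overrides; any key not supplied here falls back to the
    // values bound from configuration into configuredProperties.
    JobParameters jobParameters = new JobParametersBuilder()
        .addString("writeBlockSize", "1000")
        .addString("start", "2018-11-01T00:00:00.000Z")
        .toJobParameters();

    ArchivingProperties effective = configuredProperties.merge(jobParameters);
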
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
index af522d3..319cc5b 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
@@ -32,8 +32,9 @@ import org.apache.ambari.infra.job.AbstractJobsConfiguration;
 import org.apache.ambari.infra.job.JobContextRepository;
 import org.apache.ambari.infra.job.JobScheduler;
 import org.apache.ambari.infra.job.ObjectSource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.Step;
 import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
@@ -48,8 +49,8 @@ import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
 @Configuration
-public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<DocumentArchivingProperties, ArchivingParameters> {
-  private static final Logger LOG = LoggerFactory.getLogger(DocumentArchivingConfiguration.class);
+public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<ArchivingProperties, ArchivingProperties> {
+  private static final Logger logger = LogManager.getLogger(DocumentArchivingConfiguration.class);
   private static final DocumentWiper NOT_DELETE = (firstDocument, lastDocument) -> { };
 
   private final StepBuilderFactory steps;
@@ -85,7 +86,7 @@ public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<Do
   @StepScope
   public DocumentExporter documentExporter(DocumentItemReader documentItemReader,
                                            @Value("#{stepExecution.jobExecution.jobId}") String jobId,
-                                           @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters,
+                                           @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
                                            InfraManagerDataConfig infraManagerDataConfig,
                                            @Value("#{jobParameters[end]}") String intervalEnd,
                                            DocumentWiper documentWiper,
@@ -118,10 +119,10 @@ public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<Do
                     parameters.getSolr().getCollection(),
                     jobId,
                     isBlank(intervalEnd) ? "" : fileNameSuffixFormatter.format(intervalEnd)));
-    LOG.info("Destination directory path={}", destinationDirectory);
+    logger.info("Destination directory path={}", destinationDirectory);
     if (!destinationDirectory.exists()) {
       if (!destinationDirectory.mkdirs()) {
-        LOG.warn("Unable to create directory {}", destinationDirectory);
+        logger.warn("Unable to create directory {}", destinationDirectory);
       }
     }
 
@@ -134,7 +135,7 @@ public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<Do
 
   @Bean
   @StepScope
-  public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters,
+  public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
                                      SolrDAO solrDAO) {
     if (isBlank(parameters.getSolr().getDeleteQueryText()))
       return NOT_DELETE;
@@ -143,26 +144,26 @@ public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<Do
 
   @Bean
   @StepScope
-  public SolrDAO solrDAO(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters) {
+  public SolrDAO solrDAO(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters) {
     return new SolrDAO(parameters.getSolr());
   }
 
   private File outFile(String collection, File directoryPath, String suffix) {
     File file = new File(directoryPath, String.format("%s_-_%s.json", collection, suffix));
-    LOG.info("Exporting to temp file {}", file.getAbsolutePath());
+    logger.info("Exporting to temp file {}", file.getAbsolutePath());
     return file;
   }
 
   @Bean
   @StepScope
   public DocumentItemReader reader(ObjectSource<Document> documentSource,
-                                   @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters properties) {
+                                   @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties properties) {
     return new DocumentItemReader(documentSource, properties.getReadBlockSize());
   }
 
   @Bean
   @StepScope
-  public ObjectSource<Document> documentSource(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters,
+  public ObjectSource<Document> documentSource(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
                                                SolrDAO solrDAO) {
 
     return new SolrDocumentSource(solrDAO, parameters.getStart(), computeEnd(parameters.getEnd(), parameters.getTtl()));
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java
deleted file mode 100644
index 8ad576c..0000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
-import static org.apache.ambari.infra.json.StringToFsPermissionConverter.toFsPermission;
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-import java.time.Duration;
-
-import org.apache.ambari.infra.job.JobProperties;
-import org.apache.ambari.infra.json.DurationToStringConverter;
-import org.apache.ambari.infra.json.FsPermissionToStringConverter;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.springframework.batch.core.JobParameters;
-
-public class DocumentArchivingProperties extends JobProperties<ArchivingParameters> {
-  private int readBlockSize;
-  private int writeBlockSize;
-  private ExportDestination destination;
-  private String localDestinationDirectory;
-  private String fileNameSuffixColumn;
-  private String fileNameSuffixDateFormat;
-  private Duration ttl;
-  private SolrProperties solr;
-
-  private String s3AccessFile;
-  private String s3KeyPrefix;
-  private String s3BucketName;
-  private String s3Endpoint;
-
-  private String hdfsEndpoint;
-  private String hdfsDestinationDirectory;
-  private FsPermission hdfsFilePermission;
-  private String hdfsKerberosPrincipal;
-  private String hdfsKerberosKeytabPath;
-
-  public int getReadBlockSize() {
-    return readBlockSize;
-  }
-
-  public void setReadBlockSize(int readBlockSize) {
-    this.readBlockSize = readBlockSize;
-  }
-
-  public int getWriteBlockSize() {
-    return writeBlockSize;
-  }
-
-  public void setWriteBlockSize(int writeBlockSize) {
-    this.writeBlockSize = writeBlockSize;
-  }
-
-  public ExportDestination getDestination() {
-    return destination;
-  }
-
-  public void setDestination(ExportDestination destination) {
-    this.destination = destination;
-  }
-
-  public String getLocalDestinationDirectory() {
-    return localDestinationDirectory;
-  }
-
-  public void setLocalDestinationDirectory(String localDestinationDirectory) {
-    this.localDestinationDirectory = localDestinationDirectory;
-  }
-
-  public String getFileNameSuffixColumn() {
-    return fileNameSuffixColumn;
-  }
-
-  public void setFileNameSuffixColumn(String fileNameSuffixColumn) {
-    this.fileNameSuffixColumn = fileNameSuffixColumn;
-  }
-
-  public String getFileNameSuffixDateFormat() {
-    return fileNameSuffixDateFormat;
-  }
-
-  public void setFileNameSuffixDateFormat(String fileNameSuffixDateFormat) {
-    this.fileNameSuffixDateFormat = fileNameSuffixDateFormat;
-  }
-
-  public Duration getTtl() {
-    return ttl;
-  }
-
-  public void setTtl(Duration ttl) {
-    this.ttl = ttl;
-  }
-
-  public SolrProperties getSolr() {
-    return solr;
-  }
-
-  public void setSolr(SolrProperties query) {
-    this.solr = query;
-  }
-
-  public String getS3AccessFile() {
-    return s3AccessFile;
-  }
-
-  public void setS3AccessFile(String s3AccessFile) {
-    this.s3AccessFile = s3AccessFile;
-  }
-
-  public String getS3KeyPrefix() {
-    return s3KeyPrefix;
-  }
-
-  public void setS3KeyPrefix(String s3KeyPrefix) {
-    this.s3KeyPrefix = s3KeyPrefix;
-  }
-
-  public String getS3BucketName() {
-    return s3BucketName;
-  }
-
-  public void setS3BucketName(String s3BucketName) {
-    this.s3BucketName = s3BucketName;
-  }
-
-  public String getS3Endpoint() {
-    return s3Endpoint;
-  }
-
-  public void setS3Endpoint(String s3Endpoint) {
-    this.s3Endpoint = s3Endpoint;
-  }
-
-  public String getHdfsEndpoint() {
-    return hdfsEndpoint;
-  }
-
-  public void setHdfsEndpoint(String hdfsEndpoint) {
-    this.hdfsEndpoint = hdfsEndpoint;
-  }
-
-  public String getHdfsDestinationDirectory() {
-    return hdfsDestinationDirectory;
-  }
-
-  public FsPermission getHdfsFilePermission() {
-    return hdfsFilePermission;
-  }
-
-  public void setHdfsFilePermission(FsPermission hdfsFilePermission) {
-    this.hdfsFilePermission = hdfsFilePermission;
-  }
-
-  public void setHdfsDestinationDirectory(String hdfsDestinationDirectory) {
-    this.hdfsDestinationDirectory = hdfsDestinationDirectory;
-  }
-
-  public String getHdfsKerberosPrincipal() {
-    return hdfsKerberosPrincipal;
-  }
-
-  public void setHdfsKerberosPrincipal(String hdfsKerberosPrincipal) {
-    this.hdfsKerberosPrincipal = hdfsKerberosPrincipal;
-  }
-
-  public String getHdfsKerberosKeytabPath() {
-    return hdfsKerberosKeytabPath;
-  }
-
-  public void setHdfsKerberosKeytabPath(String hdfsKerberosKeytabPath) {
-    this.hdfsKerberosKeytabPath = hdfsKerberosKeytabPath;
-  }
-
-  private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) {
-    String valueText = jobParameters.getString(parameterName);
-    if (isBlank(valueText))
-      return defaultValue;
-    return Integer.parseInt(valueText);
-  }
-
-  @Override
-  public ArchivingParameters merge(JobParameters jobParameters) {
-    ArchivingParameters archivingParameters = new ArchivingParameters();
-    archivingParameters.setReadBlockSize(getIntJobParameter(jobParameters, "readBlockSize", readBlockSize));
-    archivingParameters.setWriteBlockSize(getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize));
-    archivingParameters.setDestination(ExportDestination.valueOf(jobParameters.getString("destination", destination.name())));
-    archivingParameters.setLocalDestinationDirectory(jobParameters.getString("localDestinationDirectory", localDestinationDirectory));
-    archivingParameters.setFileNameSuffixColumn(jobParameters.getString("fileNameSuffixColumn", fileNameSuffixColumn));
-    archivingParameters.setFileNameSuffixDateFormat(jobParameters.getString("fileNameSuffixDateFormat", fileNameSuffixDateFormat));
-    archivingParameters.setS3AccessFile(jobParameters.getString("s3AccessFile", s3AccessFile));
-    archivingParameters.setS3BucketName(jobParameters.getString("s3BucketName", s3BucketName));
-    archivingParameters.setS3KeyPrefix(jobParameters.getString("s3KeyPrefix", s3KeyPrefix));
-    archivingParameters.setS3Endpoint(jobParameters.getString("s3Endpoint", s3Endpoint));
-    archivingParameters.setHdfsEndpoint(jobParameters.getString("hdfsEndpoint", hdfsEndpoint));
-    archivingParameters.setHdfsDestinationDirectory(jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory));
-    archivingParameters.setHdfsFilePermission(toFsPermission(jobParameters.getString("hdfsFilePermission", FsPermissionToStringConverter.toString(hdfsFilePermission))));
-    archivingParameters.setHdfsKerberosPrincipal(jobParameters.getString("hdfsKerberosPrincipal", hdfsKerberosPrincipal));
-    archivingParameters.setHdfsKerberosKeytabPath(jobParameters.getString("hdfsKerberosKeytabPath", hdfsKerberosKeytabPath));
-    archivingParameters.setSolr(solr.merge(jobParameters));
-    archivingParameters.setStart(jobParameters.getString("start"));
-    archivingParameters.setEnd(jobParameters.getString("end"));
-    archivingParameters.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
-    return archivingParameters;
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
index a009031..253fc40 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
@@ -18,21 +18,21 @@
  */
 package org.apache.ambari.infra.job.archive;
 
+import java.util.Map;
+
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.Map;
-
 @Configuration
 @ConfigurationProperties(prefix = "infra-manager.jobs")
 public class DocumentArchivingPropertyMap {
-  private Map<String, DocumentArchivingProperties> solrDataArchiving;
+  private Map<String, ArchivingProperties> solrDataArchiving;
 
-  public Map<String, DocumentArchivingProperties> getSolrDataArchiving() {
+  public Map<String, ArchivingProperties> getSolrDataArchiving() {
     return solrDataArchiving;
   }
 
-  public void setSolrDataArchiving(Map<String, DocumentArchivingProperties> solrDataArchiving) {
+  public void setSolrDataArchiving(Map<String, ArchivingProperties> solrDataArchiving) {
     this.solrDataArchiving = solrDataArchiving;
   }
 }
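
The property map keeps its infra-manager.jobs prefix; only the value type changes, so each solr-data-archiving entry now binds straight into ArchivingProperties. A sketch of reading one entry (the job name key is illustrative):

    // Each configured archiving job is one map entry; properties under
    // infra-manager.jobs.solr-data-archiving.<jobName>.* bind to that entry.
    ArchivingProperties auditArchiving =
        propertyMap.getSolrDataArchiving().get("archive_audit_logs"); // illustrative key
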
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
index d87fdea..f61746d 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
@@ -19,8 +19,8 @@
 package org.apache.ambari.infra.job.archive;
 
 import org.apache.ambari.infra.job.JobContextRepository;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.BatchStatus;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.StepContribution;
@@ -34,7 +34,7 @@ import org.springframework.batch.repeat.RepeatStatus;
 
 public class DocumentExporter implements Tasklet, StepExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DocumentExporter.class);
+  private static final Logger logger = LogManager.getLogger(DocumentExporter.class);
 
   private boolean complete = false;
   private final ItemStreamReader<Document> documentReader;
@@ -78,7 +78,7 @@ public class DocumentExporter implements Tasklet, StepExecutionListener {
         if (writer != null && writtenCount >= writeBlockSize) {
           stepExecution = jobContextRepository.getStepExecution(stepExecution.getJobExecutionId(), stepExecution.getId());
           if (stepExecution.getJobExecution().getStatus() == BatchStatus.STOPPING) {
-            LOG.info("Received stop signal.");
+            logger.info("Received stop signal.");
             writer.revert();
             writer = null;
             return RepeatStatus.CONTINUABLE;
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
index b15d8b7..65b518f 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
@@ -28,7 +28,7 @@ import java.time.format.DateTimeFormatter;
 public class FileNameSuffixFormatter {
   public static final DateTimeFormatter SOLR_DATETIME_FORMATTER = DateTimeFormatter.ofPattern(SOLR_DATE_FORMAT_TEXT);
 
-  public static FileNameSuffixFormatter from(ArchivingParameters properties) {
+  public static FileNameSuffixFormatter from(ArchivingProperties properties) {
     return new FileNameSuffixFormatter(properties.getFileNameSuffixColumn(), properties.getFileNameSuffixDateFormat());
   }
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
index 531d2d5..1cf5b65 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
@@ -18,15 +18,23 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.UncheckedIOException;
+import java.io.UnsupportedEncodingException;
+
 import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
-import java.io.*;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class LocalDocumentItemWriter implements DocumentItemWriter {
-  private static final Logger LOG = LoggerFactory.getLogger(LocalDocumentItemWriter.class);
+  private static final Logger logger = LogManager.getLogger(LocalDocumentItemWriter.class);
 
   private static final ObjectMapper json = new ObjectMapper();
   private static final String ENCODING = "UTF-8";
@@ -69,7 +77,7 @@ public class LocalDocumentItemWriter implements DocumentItemWriter {
   public void revert() {
     IOUtils.closeQuietly(bufferedWriter);
     if (!outFile.delete())
-      LOG.warn("File {} was not deleted. Exists: {}", outFile.getAbsolutePath(), outFile.exists());
+      logger.warn("File {} was not deleted. Exists: {}", outFile.getAbsolutePath(), outFile.exists());
   }
 
   @Override
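
The expanded import list above (BufferedWriter, FileOutputStream, OutputStreamWriter, the UTF-8 ENCODING constant) reflects the usual construction chain for writing documents to a local file. A hedged sketch of that chain; the openWriter helper is illustrative, not a method of LocalDocumentItemWriter:

    import java.io.BufferedWriter;
    import java.io.File;
    import java.io.FileNotFoundException;
    import java.io.FileOutputStream;
    import java.io.OutputStreamWriter;
    import java.io.UncheckedIOException;
    import java.io.UnsupportedEncodingException;

    class WriterSketch {
      private static final String ENCODING = "UTF-8";

      // Wraps a FileOutputStream in an OutputStreamWriter with an explicit encoding,
      // then buffers it - mirroring the imports LocalDocumentItemWriter now declares.
      static BufferedWriter openWriter(File outFile) {
        try {
          return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outFile), ENCODING));
        } catch (FileNotFoundException | UnsupportedEncodingException e) {
          throw new UncheckedIOException(e);
        }
      }
    }
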
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
index 879b58b..7c4de52 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
@@ -18,11 +18,7 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVRecord;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static org.apache.commons.csv.CSVFormat.DEFAULT;
 
 import java.io.FileNotFoundException;
 import java.io.FileReader;
@@ -34,10 +30,14 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.Optional;
 
-import static org.apache.commons.csv.CSVFormat.DEFAULT;
+import org.apache.ambari.infra.conf.security.PasswordStore;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVRecord;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class S3AccessCsv implements PasswordStore {
-  private static final Logger LOG = LoggerFactory.getLogger(S3AccessCsv.class);
+  private static final Logger logger = LogManager.getLogger(S3AccessCsv.class);
 
   public static S3AccessCsv file(String path) {
     try {
@@ -77,7 +77,7 @@ public class S3AccessCsv implements PasswordStore {
     } catch (IOException e) {
       throw new UncheckedIOException(e);
     } catch (S3AccessCsvFormatException e) {
-      LOG.warn("Unable to parse csv file: {}", e.getMessage());
+      logger.warn("Unable to parse csv file: {}", e.getMessage());
     }
   }
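
S3AccessCsv implements PasswordStore by reading an access key/secret pair out of a CSV file with commons-csv. A sketch of the parsing idiom its imports suggest, assuming a header row names the columns (the header and column names here are illustrative):

    import static org.apache.commons.csv.CSVFormat.DEFAULT;

    import java.io.Reader;
    import java.io.StringReader;
    import java.util.Optional;

    import org.apache.commons.csv.CSVParser;
    import org.apache.commons.csv.CSVRecord;

    class CsvSketch {
      // Returns the named column of the first data record, if any.
      static Optional<String> readColumn(Reader reader, String column) throws Exception {
        try (CSVParser parser = DEFAULT.withFirstRecordAsHeader().parse(reader)) {
          for (CSVRecord record : parser)
            return Optional.ofNullable(record.get(column));
          return Optional.empty();
        }
      }

      public static void main(String[] args) throws Exception {
        Reader csv = new StringReader("Access key ID,Secret access key\nAKIA12345,s3cr3t");
        System.out.println(readColumn(csv, "Access key ID").orElse("<missing>"));
      }
    }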
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
index 76aa734..3e1310a 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
@@ -11,8 +11,8 @@ import java.security.NoSuchAlgorithmException;
 
 import org.apache.ambari.infra.conf.security.CompositePasswordStore;
 import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.xmlpull.v1.XmlPullParserException;
 
 import io.minio.MinioClient;
@@ -46,14 +46,14 @@ import io.minio.errors.RegionConflictException;
  */
 public class S3Uploader extends AbstractFileAction {
 
-  private static final Logger LOG = LoggerFactory.getLogger(S3Uploader.class);
+  private static final Logger logger = LogManager.getLogger(S3Uploader.class);
 
   private final MinioClient client;
   private final String keyPrefix;
   private final String bucketName;
 
   public S3Uploader(S3Properties s3Properties, PasswordStore passwordStore) {
-    LOG.info("Initializing S3 client with " + s3Properties);
+    logger.info("Initializing S3 client with " + s3Properties);
 
     this.keyPrefix = s3Properties.getS3KeyPrefix();
     this.bucketName = s3Properties.getS3BucketName();
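
S3Uploader builds a MinioClient from S3Properties, with credentials resolved through the PasswordStore chain. A sketch of the setup and a single upload against the pre-6.x minio-java API that the io.minio imports here suggest; endpoint, credentials, bucket and key are placeholders:

    import io.minio.MinioClient;

    class S3UploadSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder endpoint and credentials; the real values come from
        // S3Properties and the PasswordStore passed to S3Uploader.
        MinioClient client = new MinioClient("http://s3.example.org:9000", "accessKey", "secretKey");
        String bucket = "archive";
        if (!client.bucketExists(bucket))
          client.makeBucket(bucket);
        // keyPrefix + file name becomes the object key, mirroring S3Uploader's fields
        client.putObject(bucket, "solr_archive/audit_logs_2018-11-15.json.gz",
                "/tmp/audit_logs_2018-11-15.json.gz");
      }
    }
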
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
index 7f8fd07..ed77a06 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
@@ -22,19 +22,19 @@ import java.io.IOException;
 import java.io.UncheckedIOException;
 
 import org.apache.ambari.infra.job.SolrDAOBase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class SolrDAO extends SolrDAOBase implements DocumentWiper {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrDAO.class);
+  private static final Logger logger = LogManager.getLogger(SolrDAO.class);
 
-  private final SolrParameters queryProperties;
+  private final SolrProperties queryProperties;
 
-  public SolrDAO(SolrParameters queryProperties) {
+  public SolrDAO(SolrProperties queryProperties) {
     super(queryProperties.getZooKeeperConnectionString(), queryProperties.getCollection());
     this.queryProperties = queryProperties;
   }
@@ -53,7 +53,7 @@ public class SolrDAO extends SolrDAOBase implements DocumentWiper {
             .build();
     query.setRows(rows);
 
-    LOG.info("Executing solr query {}", query.toLocalParamsString());
+    logger.info("Executing solr query {}", query.toLocalParamsString());
 
     try {
       CloudSolrClient client = createClient();
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParameters.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParameters.java
deleted file mode 100644
index a793c9b..0000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParameters.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class SolrParameters {
-  private String zooKeeperConnectionString;
-  private String collection;
-  private String queryText;
-  private String filterQueryText;
-  private String[] sortColumn;
-  private String deleteQueryText;
-
-  public String getZooKeeperConnectionString() {
-    return zooKeeperConnectionString;
-  }
-
-  public void setZooKeeperConnectionString(String zooKeeperConnectionString) {
-    this.zooKeeperConnectionString = zooKeeperConnectionString;
-  }
-
-  public String getCollection() {
-    return collection;
-  }
-
-  public void setCollection(String collection) {
-    this.collection = collection;
-  }
-
-  public String getQueryText() {
-    return queryText;
-  }
-
-  public void setQueryText(String queryText) {
-    this.queryText = queryText;
-  }
-
-  public String getFilterQueryText() {
-    return filterQueryText;
-  }
-
-  public void setFilterQueryText(String filterQueryText) {
-    this.filterQueryText = filterQueryText;
-  }
-
-  public String[] getSortColumn() {
-    return sortColumn;
-  }
-
-  public void setSortColumn(String[] sortColumn) {
-    this.sortColumn = sortColumn;
-  }
-
-  public String getDeleteQueryText() {
-    return deleteQueryText;
-  }
-
-  public void setDeleteQueryText(String deleteQueryText) {
-    this.deleteQueryText = deleteQueryText;
-  }
-
-  public SolrQueryBuilder toQueryBuilder() {
-    return new SolrQueryBuilder().
-            setQueryText(queryText)
-            .setFilterQueryText(filterQueryText)
-            .addSort(sortColumn);
-  }
-
-  public void validate() {
-    if (isBlank(zooKeeperConnectionString))
-      throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!");
-
-    if (isBlank(collection))
-      throw new IllegalArgumentException("The property collection can not be null or empty string!");
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
index 43e871f..cf26cda 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
@@ -18,6 +18,8 @@
  */
 package org.apache.ambari.infra.job.archive;
 
+import static org.apache.commons.lang.StringUtils.isBlank;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -79,13 +81,28 @@ public class SolrProperties {
     this.deleteQueryText = deleteQueryText;
   }
 
-  public SolrParameters merge(JobParameters jobParameters) {
-    SolrParameters solrParameters = new SolrParameters();
-    solrParameters.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
-    solrParameters.setCollection(jobParameters.getString("collection", collection));
-    solrParameters.setQueryText(jobParameters.getString("queryText", queryText));
-    solrParameters.setFilterQueryText(jobParameters.getString("filterQueryText", filterQueryText));
-    solrParameters.setDeleteQueryText(jobParameters.getString("deleteQueryText", deleteQueryText));
+  public SolrQueryBuilder toQueryBuilder() {
+    return new SolrQueryBuilder().
+            setQueryText(queryText)
+            .setFilterQueryText(filterQueryText)
+            .addSort(sortColumn);
+  }
+
+  public void validate() {
+    if (isBlank(zooKeeperConnectionString))
+      throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!");
+
+    if (isBlank(collection))
+      throw new IllegalArgumentException("The property collection can not be null or empty string!");
+  }
+
+  public SolrProperties merge(JobParameters jobParameters) {
+    SolrProperties solrProperties = new SolrProperties();
+    solrProperties.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
+    solrProperties.setCollection(jobParameters.getString("collection", collection));
+    solrProperties.setQueryText(jobParameters.getString("queryText", queryText));
+    solrProperties.setFilterQueryText(jobParameters.getString("filterQueryText", filterQueryText));
+    solrProperties.setDeleteQueryText(jobParameters.getString("deleteQueryText", deleteQueryText));
 
     String sortValue;
     List<String> sortColumns = new ArrayList<>();
@@ -95,12 +112,12 @@ public class SolrProperties {
       ++i;
     }
     if (!sortColumns.isEmpty()) {
-      solrParameters.setSortColumn(sortColumns.toArray(new String[0]));
+      solrProperties.setSortColumn(sortColumns.toArray(new String[0]));
     }
     else {
-      solrParameters.setSortColumn(sortColumn);
+      solrProperties.setSortColumn(sortColumn);
     }
 
-    return solrParameters;
+    return solrProperties;
   }
 }
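
With SolrParameters folded into SolrProperties, a single class now carries both the configured defaults and the launch-time overrides: merge() reads each key from the JobParameters and falls back to the configured field. A usage sketch with illustrative values:

    import org.apache.ambari.infra.job.archive.SolrProperties;
    import org.springframework.batch.core.JobParameters;
    import org.springframework.batch.core.JobParametersBuilder;

    class MergeSketch {
      public static void main(String[] args) {
        SolrProperties defaults = new SolrProperties();
        defaults.setZooKeeperConnectionString("zk1:2181/infra-solr");
        defaults.setCollection("hadoop_logs");
        defaults.setQueryText("logtime:[* TO \"${end}\"]");

        // Launch-time parameters override the configured defaults key by key;
        // anything not supplied keeps the value set above.
        JobParameters runtime = new JobParametersBuilder()
                .addString("collection", "audit_logs")
                .toJobParameters();

        SolrProperties effective = defaults.merge(runtime);
        effective.validate(); // fails fast if the zk string or collection ended up blank
        System.out.println(effective.getCollection()); // audit_logs
      }
    }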
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java
index 27f61fa..d6faa70 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java
@@ -73,7 +73,7 @@ public class CleanUpConfiguration {
   @StepScope
   protected TaskHistoryWiper taskHistoryWiper(
           InfraJobExecutionDao infraJobExecutionDao,
-          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") CleanUpParameters cleanUpParameters) {
-    return new TaskHistoryWiper(infraJobExecutionDao, cleanUpParameters.getTtl());
+          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") CleanUpProperties cleanUpProperties) {
+    return new TaskHistoryWiper(infraJobExecutionDao, cleanUpProperties.getTtl());
   }
 }
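
The tasklet factory above is the standard Spring Batch step-scoped SpEL pattern: because the bean is created only when the step runs, the expression can pull the merged properties object back out of the job's ExecutionContext, where it was stored earlier under PARAMETERS_CONTEXT_KEY. A minimal sketch of the pattern with a hypothetical key and bean:

    import org.springframework.batch.core.configuration.annotation.StepScope;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class StepScopeSketch {
      // Hypothetical key; Infra Manager uses its own PARAMETERS_CONTEXT_KEY constant.
      private static final String KEY = "jobProperties";

      // @StepScope defers bean creation until a step execution exists, so the SpEL
      // expression can navigate stepExecution -> jobExecution -> executionContext.
      @Bean
      @StepScope
      public Runnable contextAwareBean(
              @Value("#{stepExecution.jobExecution.executionContext.get('" + KEY + "')}") Object props) {
        return () -> System.out.println("resolved properties: " + props);
      }
    }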
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpParameters.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpParameters.java
deleted file mode 100644
index a4f2141..0000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpParameters.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.cleanup;
-
-import java.time.Duration;
-
-import org.apache.ambari.infra.job.Validatable;
-import org.apache.ambari.infra.json.DurationToStringConverter;
-import org.apache.ambari.infra.json.StringToDurationConverter;
-
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.fasterxml.jackson.databind.annotation.JsonSerialize;
-
-public class CleanUpParameters implements Validatable {
-
-  @JsonSerialize(converter = DurationToStringConverter.class)
-  @JsonDeserialize(converter = StringToDurationConverter.class)
-  private Duration ttl;
-
-  public Duration getTtl() {
-    return ttl;
-  }
-
-  public void setTtl(Duration ttl) {
-    this.ttl = ttl;
-  }
-
-  @Override
-  public void validate() {
-
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java
index 7bf9808..b29dcc1 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java
@@ -23,15 +23,22 @@ import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
 import java.time.Duration;
 
 import org.apache.ambari.infra.job.JobProperties;
+import org.apache.ambari.infra.job.Validatable;
 import org.apache.ambari.infra.json.DurationToStringConverter;
+import org.apache.ambari.infra.json.StringToDurationConverter;
 import org.springframework.batch.core.JobParameters;
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+
 @Configuration
 @ConfigurationProperties(prefix = "infra-manager.jobs.clean-up")
-public class CleanUpProperties extends JobProperties<CleanUpParameters> {
+public class CleanUpProperties extends JobProperties<CleanUpProperties> implements Validatable {
 
+  @JsonSerialize(converter = DurationToStringConverter.class)
+  @JsonDeserialize(converter = StringToDurationConverter.class)
   private Duration ttl;
 
   protected CleanUpProperties() {
@@ -47,9 +54,14 @@ public class CleanUpProperties extends JobProperties<CleanUpParameters> {
   }
 
   @Override
-  public CleanUpParameters merge(JobParameters jobParameters) {
-    CleanUpParameters cleanUpParameters = new CleanUpParameters();
-    cleanUpParameters.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
-    return cleanUpParameters;
+  public void validate() {
+
+  }
+
+  @Override
+  public CleanUpProperties merge(JobParameters jobParameters) {
+    CleanUpProperties cleanUpProperties = new CleanUpProperties();
+    cleanUpProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return cleanUpProperties;
   }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java
index 594515e..5a296de 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java
@@ -22,8 +22,8 @@ import java.time.Duration;
 import java.time.OffsetDateTime;
 
 import org.apache.ambari.infra.job.InfraJobExecutionDao;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.StepContribution;
 import org.springframework.batch.core.scope.context.ChunkContext;
 import org.springframework.batch.core.step.tasklet.Tasklet;
@@ -31,7 +31,7 @@ import org.springframework.batch.repeat.RepeatStatus;
 
 public class TaskHistoryWiper implements Tasklet {
 
-  private static final Logger logger = LoggerFactory.getLogger(TaskHistoryWiper.class);
+  private static final Logger logger = LogManager.getLogger(TaskHistoryWiper.class);
   public static final Duration DEFAULT_TTL = Duration.ofHours(1);
 
   private final InfraJobExecutionDao infraJobExecutionDao;
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingParameters.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
similarity index 73%
rename from ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingParameters.java
rename to ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
index 71d98e1..be4e651 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingParameters.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
@@ -18,18 +18,21 @@
  */
 package org.apache.ambari.infra.job.deleting;
 
+import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
 import static org.apache.commons.lang.StringUtils.isBlank;
 
 import java.time.Duration;
 
+import org.apache.ambari.infra.job.JobProperties;
 import org.apache.ambari.infra.job.Validatable;
 import org.apache.ambari.infra.json.DurationToStringConverter;
 import org.apache.ambari.infra.json.StringToDurationConverter;
+import org.springframework.batch.core.JobParameters;
 
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-public class DeletingParameters implements Validatable {
+public class DeletingProperties extends JobProperties<DeletingProperties> implements Validatable {
   private String zooKeeperConnectionString;
   private String collection;
   private String filterField;
@@ -98,4 +101,16 @@ public class DeletingParameters implements Validatable {
     if (isBlank(filterField))
       throw new IllegalArgumentException("The property filterField can not be null or empty string!");
   }
+
+  @Override
+  public DeletingProperties merge(JobParameters jobParameters) {
+    DeletingProperties deletingProperties = new DeletingProperties();
+    deletingProperties.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
+    deletingProperties.setCollection(jobParameters.getString("collection", collection));
+    deletingProperties.setFilterField(jobParameters.getString("filterField", filterField));
+    deletingProperties.setStart(jobParameters.getString("start", "*"));
+    deletingProperties.setEnd(jobParameters.getString("end", "*"));
+    deletingProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return deletingProperties;
+  }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
index f9a782c..1d6fb9a 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
@@ -38,7 +38,7 @@ import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
 @Configuration
-public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<DocumentDeletingProperties, DeletingParameters> {
+public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<DeletingProperties, DeletingProperties> {
 
   private final StepBuilderFactory steps;
   private final Step deleteStep;
@@ -72,7 +72,7 @@ public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<Doc
   @Bean
   @StepScope
   public DocumentWiperTasklet documentWiperTasklet(
-          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") DeletingParameters parameters) {
+          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") DeletingProperties parameters) {
     return new DocumentWiperTasklet(parameters);
   }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java
deleted file mode 100644
index e7ecc13..0000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.deleting;
-
-import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
-
-import java.time.Duration;
-
-import org.apache.ambari.infra.job.JobProperties;
-import org.apache.ambari.infra.json.DurationToStringConverter;
-import org.springframework.batch.core.JobParameters;
-
-public class DocumentDeletingProperties extends JobProperties<DeletingParameters> {
-  private String zooKeeperConnectionString;
-  private String collection;
-  private String filterField;
-  private Duration ttl;
-
-  public String getZooKeeperConnectionString() {
-    return zooKeeperConnectionString;
-  }
-
-  public void setZooKeeperConnectionString(String zooKeeperConnectionString) {
-    this.zooKeeperConnectionString = zooKeeperConnectionString;
-  }
-
-  public String getCollection() {
-    return collection;
-  }
-
-  public void setCollection(String collection) {
-    this.collection = collection;
-  }
-
-  public String getFilterField() {
-    return filterField;
-  }
-
-  public void setFilterField(String filterField) {
-    this.filterField = filterField;
-  }
-
-  public Duration getTtl() {
-    return ttl;
-  }
-
-  public void setTtl(Duration ttl) {
-    this.ttl = ttl;
-  }
-
-  @Override
-  public DeletingParameters merge(JobParameters jobParameters) {
-    DeletingParameters deletingParameters = new DeletingParameters();
-    deletingParameters.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
-    deletingParameters.setCollection(jobParameters.getString("collection", collection));
-    deletingParameters.setFilterField(jobParameters.getString("filterField", filterField));
-    deletingParameters.setStart(jobParameters.getString("start", "*"));
-    deletingParameters.setEnd(jobParameters.getString("end", "*"));
-    deletingParameters.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
-    return deletingParameters;
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
index 1dc0caf..92d0428 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
@@ -18,21 +18,21 @@
  */
 package org.apache.ambari.infra.job.deleting;
 
+import java.util.Map;
+
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.Map;
-
 @Configuration
 @ConfigurationProperties(prefix = "infra-manager.jobs")
 public class DocumentDeletingPropertyMap {
-  private Map<String, DocumentDeletingProperties> solrDataDeleting;
+  private Map<String, DeletingProperties> solrDataDeleting;
 
-  public Map<String, DocumentDeletingProperties> getSolrDataDeleting() {
+  public Map<String, DeletingProperties> getSolrDataDeleting() {
     return solrDataDeleting;
   }
 
-  public void setSolrDataDeleting(Map<String, DocumentDeletingProperties> solrDataDeleting) {
+  public void setSolrDataDeleting(Map<String, DeletingProperties> solrDataDeleting) {
     this.solrDataDeleting = solrDataDeleting;
   }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
index 69d8c62..cae64f4 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
@@ -28,11 +28,11 @@ import org.springframework.batch.core.step.tasklet.Tasklet;
 import org.springframework.batch.repeat.RepeatStatus;
 
 public class DocumentWiperTasklet extends SolrDAOBase implements Tasklet {
-  private final DeletingParameters parameters;
+  private final DeletingProperties parameters;
 
-  public DocumentWiperTasklet(DeletingParameters deletingParameters) {
-    super(deletingParameters.getZooKeeperConnectionString(), deletingParameters.getCollection());
-    parameters = deletingParameters;
+  public DocumentWiperTasklet(DeletingProperties deletingProperties) {
+    super(deletingProperties.getZooKeeperConnectionString(), deletingProperties.getCollection());
+    parameters = deletingProperties;
   }
 
   @Override
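
DocumentWiperTasklet inherits its ZooKeeper connection and delete plumbing from SolrDAOBase and removes documents whose filter field falls between start and end. A hedged sketch of the underlying SolrJ delete-by-query, with an illustrative collection and range (the builder call assumes the SolrJ 6.x-style withZkHost API):

    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    class SolrDeleteSketch {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient client = new CloudSolrClient.Builder()
                .withZkHost("zk1:2181/infra-solr").build()) {
          // Delete everything whose filter field is older than the cutoff -
          // the same shape of query a TTL-driven deletion job issues.
          client.deleteByQuery("audit_logs", "logtime:[* TO \"2018-10-15T00:00:00Z\"]");
          client.commit("audit_logs");
        }
      }
    }
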
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
index a124e4d..ff5ad6d 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
@@ -18,17 +18,17 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.item.ItemProcessor;
 
 public class DummyItemProcessor implements ItemProcessor<DummyObject, String> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyItemProcessor.class);
+  private static final Logger logger = LogManager.getLogger(DummyItemProcessor.class);
 
   @Override
   public String process(DummyObject input) throws Exception {
-    LOG.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2());
+    logger.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2());
     Thread.sleep(10000);
     return String.format("%s, %s", input.getF1(), input.getF2());
   }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
index 89ad013..3adb91a 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
@@ -18,14 +18,6 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.apache.ambari.infra.conf.InfraManagerDataConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.annotation.BeforeStep;
-import org.springframework.batch.item.ItemWriter;
-
-import javax.inject.Inject;
 import java.io.File;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -33,9 +25,18 @@ import java.nio.file.Paths;
 import java.util.Date;
 import java.util.List;
 
+import javax.inject.Inject;
+
+import org.apache.ambari.infra.conf.InfraManagerDataConfig;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.annotation.BeforeStep;
+import org.springframework.batch.item.ItemWriter;
+
 public class DummyItemWriter implements ItemWriter<String> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyItemWriter.class);
+  private static final Logger logger = LogManager.getLogger(DummyItemWriter.class);
 
   private StepExecution stepExecution;
 
@@ -44,17 +45,17 @@ public class DummyItemWriter implements ItemWriter<String> {
 
   @Override
   public void write(List<? extends String> values) throws Exception {
-    LOG.info("DummyItem writer called (values: {})... wait 1 seconds", values.toString());
+    logger.info("DummyItem writer called (values: {})... wait 1 seconds", values.toString());
     Thread.sleep(1000);
     String outputDirectoryLocation = String.format("%s%s%s%s", infraManagerDataConfig.getDataFolder(), File.separator, "dummyOutput-", new Date().getTime());
     Path pathToDirectory = Paths.get(outputDirectoryLocation);
     Path pathToFile = Paths.get(String.format("%s%s%s", outputDirectoryLocation, File.separator, "dummyOutput.txt"));
     Files.createDirectories(pathToDirectory);
-    LOG.info("Write location to step execution context...");
+    logger.info("Write location to step execution context...");
     stepExecution.getExecutionContext().put("stepOutputLocation", pathToFile.toAbsolutePath().toString());
-    LOG.info("Write location to job execution context...");
+    logger.info("Write location to job execution context...");
     stepExecution.getJobExecution().getExecutionContext().put("jobOutputLocation", pathToFile.toAbsolutePath().toString());
-    LOG.info("Write to file: {}", pathToFile.toAbsolutePath());
+    logger.info("Write to file: {}", pathToFile.toAbsolutePath());
     Files.write(pathToFile, values.toString().getBytes());
   }
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
index 99c50e8..4602b9b 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
@@ -18,28 +18,28 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobExecutionListener;
 
 public class DummyJobListener implements JobExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyJobListener.class);
+  private static final Logger logger = LogManager.getLogger(DummyJobListener.class);
 
   @Override
   public void beforeJob(JobExecution jobExecution) {
-    LOG.info("Dummy - before job execution");
+    logger.info("Dummy - before job execution");
   }
 
   @Override
   public void afterJob(JobExecution jobExecution) {
-    LOG.info("Dummy - after job execution");
+    logger.info("Dummy - after job execution");
     if (jobExecution.getExecutionContext().get("jobOutputLocation") != null) {
       String jobOutputLocation = (String) jobExecution.getExecutionContext().get("jobOutputLocation");
       String exitDescription = "file://" + jobOutputLocation;
-      LOG.info("Add exit description '{}'", exitDescription);
+      logger.info("Add exit description '{}'", exitDescription);
       jobExecution.setExitStatus(new ExitStatus(ExitStatus.COMPLETED.getExitCode(), exitDescription));
     }
   }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
index 548e650..838d846 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
@@ -18,24 +18,24 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.StepExecution;
 import org.springframework.batch.core.StepExecutionListener;
 
 public class DummyStepListener implements StepExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyStepListener.class);
+  private static final Logger logger = LogManager.getLogger(DummyStepListener.class);
 
   @Override
   public void beforeStep(StepExecution stepExecution) {
-    LOG.info("Dummy step - before step execution");
+    logger.info("Dummy step - before step execution");
   }
 
   @Override
   public ExitStatus afterStep(StepExecution stepExecution) {
-    LOG.info("Dummy step - after step execution");
+    logger.info("Dummy step - after step execution");
     return stepExecution.getExitStatus();
   }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
index 64ba21f..86ffc1a 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
@@ -40,8 +40,8 @@ import org.apache.ambari.infra.model.JobOperationParams;
 import org.apache.ambari.infra.model.StepExecutionContextResponse;
 import org.apache.ambari.infra.model.StepExecutionInfoResponse;
 import org.apache.ambari.infra.model.StepExecutionProgressResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.history.StepExecutionHistory;
 import org.springframework.batch.admin.service.JobService;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
@@ -67,7 +67,7 @@ import com.google.common.collect.Lists;
 @Named
 public class JobManager implements Jobs {
 
-  private static final Logger LOG = LoggerFactory.getLogger(JobManager.class);
+  private static final Logger logger = LogManager.getLogger(JobManager.class);
 
   @Inject
   private JobService jobService;
@@ -117,7 +117,7 @@ public class JobManager implements Jobs {
     try {
       jobService.stop(jobExecutionId);
     } catch (JobExecutionNotRunningException e) {
-      LOG.warn(String.format("Job is not running jobExecutionId=%d", jobExecutionId), e.getMessage());
+      logger.warn(String.format("Job is not running jobExecutionId=%d", jobExecutionId), e.getMessage());
     }
     jobService.abandon(jobExecutionId);
   }
@@ -162,7 +162,7 @@ public class JobManager implements Jobs {
     } else {
       throw new UnsupportedOperationException("Unsupported operaration");
     }
-    LOG.info("Job {} was marked {}", jobExecution.getJobInstance().getJobName(), operation.name());
+    logger.info("Job {} was marked {}", jobExecution.getJobInstance().getJobName(), operation.name());
     return new JobExecutionInfoResponse(jobExecution, timeZone);
   }
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
index 079cce3..9f47bdd 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
@@ -19,9 +19,17 @@
 package org.apache.ambari.infra.rest;
 
 
-import com.google.common.collect.Maps;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.util.Map;
+
+import javax.batch.operations.JobExecutionAlreadyCompleteException;
+import javax.inject.Named;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
 import org.springframework.batch.core.JobParametersInvalidException;
 import org.springframework.batch.core.launch.JobExecutionNotFailedException;
@@ -38,19 +46,13 @@ import org.springframework.batch.core.repository.JobRestartException;
 import org.springframework.batch.core.step.NoSuchStepException;
 import org.springframework.web.bind.MethodArgumentNotValidException;
 
-import javax.batch.operations.JobExecutionAlreadyCompleteException;
-import javax.inject.Named;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-import java.util.Map;
+import com.google.common.collect.Maps;
 
 @Named
 @Provider
 public class JobExceptionMapper implements ExceptionMapper<Throwable> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(JobExceptionMapper.class);
+  private static final Logger logger = LogManager.getLogger(JobExceptionMapper.class);
 
   private static final Map<Class, Response.Status> exceptionStatusCodeMap = Maps.newHashMap();
 
@@ -75,13 +77,13 @@ public class JobExceptionMapper implements ExceptionMapper<Throwable> {
 
   @Override
   public Response toResponse(Throwable throwable) {
-    LOG.error("REST Exception occurred:", throwable);
+    logger.error("REST Exception occurred:", throwable);
     Response.Status status = Response.Status.INTERNAL_SERVER_ERROR;
 
     for (Map.Entry<Class, Response.Status> entry : exceptionStatusCodeMap.entrySet()) {
       if (throwable.getClass().isAssignableFrom(entry.getKey())) {
         status = entry.getValue();
-        LOG.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode());
+        logger.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode());
         break;
       }
     }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
index 7e353fb..43f7c41 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
@@ -48,8 +48,8 @@ import org.apache.ambari.infra.model.StepExecutionContextResponse;
 import org.apache.ambari.infra.model.StepExecutionInfoResponse;
 import org.apache.ambari.infra.model.StepExecutionProgressResponse;
 import org.apache.ambari.infra.model.StepExecutionRequest;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
 import org.springframework.batch.admin.web.JobInfo;
 import org.springframework.batch.core.JobParametersBuilder;
@@ -74,7 +74,7 @@ import io.swagger.annotations.Authorization;
 @Named
 @Scope("request")
 public class JobResource {
-  private static final Logger LOG = LoggerFactory.getLogger(JobResource.class);
+  private static final Logger logger = LogManager.getLogger(JobResource.class);
 
   @Inject
   private JobManager jobManager;
@@ -98,7 +98,7 @@ public class JobResource {
     String params = request.getParams();
     JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
     if (params != null) {
-      LOG.info("Parsing parameters of job {} '{}'", jobName, params);
+      logger.info("Parsing parameters of job {} '{}'", jobName, params);
       Splitter.on(',')
               .trimResults()
               .withKeyValueSeparator(Splitter.on('=').limit(2).trimResults())
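
The Splitter chain above turns the request's params string into a String-to-String map before it is copied into the JobParametersBuilder; limit(2) keeps '=' characters inside values intact. A short sketch of what the chain produces for an illustrative input:

    import java.util.Map;

    import com.google.common.base.Splitter;

    class ParamsSketch {
      public static void main(String[] args) {
        String params = "collection=audit_logs, ttl=P30D, end=2018-11-15T00:00:00Z";
        // Same chain as JobResource: comma-separated entries, '=' key/value separator.
        Map<String, String> parsed = Splitter.on(',')
                .trimResults()
                .withKeyValueSeparator(Splitter.on('=').limit(2).trimResults())
                .split(params);
        parsed.forEach((k, v) -> System.out.println(k + " -> " + v));
      }
    }
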
diff --git a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
index 71d25b6..d4b4313 100644
--- a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
+++ b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
@@ -36,7 +36,7 @@ public class  SolrPropertiesTest {
 
     SolrProperties solrProperties = new SolrProperties();
     solrProperties.setSortColumn(new String[] {"testColumn"});
-    SolrParameters solrParameters = solrProperties.merge(jobParameters);
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
     assertThat(solrParameters.getSortColumn().length, is(2));
     assertThat(solrParameters.getSortColumn()[0], is("logtime"));
     assertThat(solrParameters.getSortColumn()[1], is("id"));
@@ -48,7 +48,7 @@ public class  SolrPropertiesTest {
             .toJobParameters();
 
     SolrProperties solrProperties = new SolrProperties();
-    SolrParameters solrParameters = solrProperties.merge(jobParameters);
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
     assertThat(solrParameters.getSortColumn(), is(nullValue()));
   }
 
@@ -59,7 +59,7 @@ public class  SolrPropertiesTest {
 
     SolrProperties solrProperties = new SolrProperties();
     solrProperties.setSortColumn(new String[] {"testColumn"});
-    SolrParameters solrParameters = solrProperties.merge(jobParameters);
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
     assertThat(solrParameters.getSortColumn().length, is(1));
     assertThat(solrParameters.getSortColumn()[0], is("testColumn"));
   }
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java
index cc69985..e05586a 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java
@@ -35,7 +35,7 @@ import org.slf4j.LoggerFactory;
 
 public class AmbariSolrCloudCLI {
 
-  private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudCLI.class);
+  private static final Logger logger = LoggerFactory.getLogger(AmbariSolrCloudCLI.class);
 
   private static final int ZK_CLIENT_TIMEOUT = 60000; // 1 minute
   private static final int ZK_CLIENT_CONNECT_TIMEOUT = 60000; // 1 minute
@@ -700,9 +700,9 @@ public class AmbariSolrCloudCLI {
 
   private static void exit(int exitCode, String message) {
     if (message != null){
-      LOG.error(message);
+      logger.error(message);
     }
-    LOG.info("Return code: {}", exitCode);
+    logger.info("Return code: {}", exitCode);
     System.exit(exitCode);
   }
 }
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
index 4ef629d..fe7e114 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
@@ -53,7 +53,7 @@ import org.slf4j.LoggerFactory;
  */
 public class AmbariSolrCloudClient {
 
-  private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudClient.class);
+  private static final Logger logger = LoggerFactory.getLogger(AmbariSolrCloudClient.class);
 
   private final String zkConnectString;
   private final String collection;
@@ -127,9 +127,9 @@ public class AmbariSolrCloudClient {
     List<String> collections = listCollections();
     if (!collections.contains(getCollection())) {
       String collection = new CreateCollectionCommand(getRetryTimes(), getInterval()).run(this);
-      LOG.info("Collection '{}' creation request sent.", collection);
+      logger.info("Collection '{}' creation request sent.", collection);
     } else {
-      LOG.info("Collection '{}' already exits.", getCollection());
+      logger.info("Collection '{}' already exits.", getCollection());
       if (this.isImplicitRouting()) {
         createShard(null);
       }
@@ -140,7 +140,7 @@ public class AmbariSolrCloudClient {
   public String outputCollectionData() throws Exception {
     List<String> collections = listCollections();
     String result = new DumpCollectionsCommand(getRetryTimes(), getInterval(), collections).run(this);
-    LOG.info("Dump collections response: {}", result);
+    logger.info("Dump collections response: {}", result);
     return result;
   }
 
@@ -148,9 +148,9 @@ public class AmbariSolrCloudClient {
    * Set cluster property in clusterprops.json.
    */
   public void setClusterProp() throws Exception {
-    LOG.info("Set cluster prop: '{}'", this.getPropName());
+    logger.info("Set cluster prop: '{}'", this.getPropName());
     String newPropValue = new SetClusterPropertyZkCommand(getRetryTimes(), getInterval()).run(this);
-    LOG.info("Set cluster prop '{}' successfully to '{}'", this.getPropName(), newPropValue);
+    logger.info("Set cluster prop '{}' successfully to '{}'", this.getPropName(), newPropValue);
   }
 
   /**
@@ -159,11 +159,11 @@ public class AmbariSolrCloudClient {
   public void createZnode() throws Exception {
     boolean znodeExists = isZnodeExists(this.znode);
     if (znodeExists) {
-      LOG.info("Znode '{}' already exists.", this.znode);
+      logger.info("Znode '{}' already exists.", this.znode);
     } else {
-      LOG.info("Znode '{}' does not exist. Creating...", this.znode);
+      logger.info("Znode '{}' does not exist. Creating...", this.znode);
       String newZnode = new CreateSolrZnodeZkCommand(getRetryTimes(), getInterval()).run(this);
-      LOG.info("Znode '{}' is created successfully.", newZnode);
+      logger.info("Znode '{}' is created successfully.", newZnode);
     }
   }
 
@@ -172,20 +172,20 @@ public class AmbariSolrCloudClient {
   * E.g.: localhost:2181 and znode: /ambari-solr, checks existence of localhost:2181/ambari-solr
    */
   public boolean isZnodeExists(String znode) throws Exception {
-    LOG.info("Check '{}' znode exists or not", znode);
+    logger.info("Check '{}' znode exists or not", znode);
     boolean result = new CheckZnodeZkCommand(getRetryTimes(), getInterval(), znode).run(this);
     if (result) {
-      LOG.info("'{}' znode exists", znode);
+      logger.info("'{}' znode exists", znode);
     } else {
-      LOG.info("'{}' znode does not exist", znode);
+      logger.info("'{}' znode does not exist", znode);
     }
     return result;
   }
 
   public void setupKerberosPlugin() throws Exception {
-    LOG.info("Setup kerberos plugin in security.json");
+    logger.info("Setup kerberos plugin in security.json");
     new EnableKerberosPluginSolrZkCommand(getRetryTimes(), getInterval()).run(this);
-    LOG.info("KerberosPlugin is set in security.json");
+    logger.info("KerberosPlugin is set in security.json");
   }
 
   /**
@@ -206,7 +206,7 @@ public class AmbariSolrCloudClient {
    * Unsecure znode
    */
   public void unsecureZnode() throws Exception {
-    LOG.info("Disable security for znode - ", this.getZnode());
+    logger.info("Disable security for znode - ", this.getZnode());
     new UnsecureZNodeZkCommand(getRetryTimes(), getInterval()).run(this);
   }
 
@@ -215,7 +215,7 @@ public class AmbariSolrCloudClient {
    */
   public String uploadConfiguration() throws Exception {
     String configSet = new UploadConfigZkCommand(getRetryTimes(), getInterval()).run(this);
-    LOG.info("'{}' is uploaded to zookeeper.", configSet);
+    logger.info("'{}' is uploaded to zookeeper.", configSet);
     return configSet;
   }
 
@@ -224,7 +224,7 @@ public class AmbariSolrCloudClient {
    */
   public String downloadConfiguration() throws Exception {
     String configDir = new DownloadConfigZkCommand(getRetryTimes(), getInterval()).run(this);
-    LOG.info("Config set is download from zookeeper. ({})", configDir);
+    logger.info("Config set is download from zookeeper. ({})", configDir);
     return configDir;
   }
 
@@ -234,9 +234,9 @@ public class AmbariSolrCloudClient {
   public boolean configurationExists() throws Exception {
     boolean configExits = new CheckConfigZkCommand(getRetryTimes(), getInterval()).run(this);
     if (configExits) {
-      LOG.info("Config {} exits", configSet);
+      logger.info("Config {} exits", configSet);
     } else {
-      LOG.info("Configuration '{}' does not exist", configSet);
+      logger.info("Configuration '{}' does not exist", configSet);
     }
     return configExits;
   }
@@ -258,7 +258,7 @@ public class AmbariSolrCloudClient {
       for (String shardName : shardList) {
         if (!existingShards.contains(shardName)) {
           new CreateShardCommand(shardName, getRetryTimes(), getInterval()).run(this);
-          LOG.info("Adding new shard to collection request sent ('{}': {})", getCollection(), shardName);
+          logger.info("Adding new shard to collection request sent ('{}': {})", getCollection(), shardName);
           existingShards.add(shardName);
         }
       }
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java
index 5e87859..74c0480 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java
@@ -24,7 +24,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public abstract class AbstractRetryCommand<RESPONSE> {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractRetryCommand.class);
+  private static final Logger logger = LoggerFactory.getLogger(AbstractRetryCommand.class);
 
   private final int interval;
   private final int maxRetries;
@@ -44,9 +44,9 @@ public abstract class AbstractRetryCommand<RESPONSE> {
     try {
       return createAndProcessRequest(solrCloudClient);
     } catch (Exception ex) {
-      LOG.error(ex.getMessage(), ex);
+      logger.error(ex.getMessage(), ex);
       tries++;
-      LOG.info("Command failed, tries again (tries: {})", tries);
+      logger.info("Command failed, tries again (tries: {})", tries);
       if (maxRetries == tries) {
         throw new AmbariSolrCloudClientException(String.format("Maximum retries exceeded: %d", tries), ex);
       } else {
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java
index 6958623..f114b54 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java
@@ -18,6 +18,10 @@
  */
 package org.apache.ambari.infra.solr.commands;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
 import org.apache.ambari.infra.solr.util.AclUtils;
 import org.apache.commons.lang.StringUtils;
@@ -31,13 +35,9 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
 public class SecureSolrZNodeZkCommand extends AbstractZookeeperRetryCommand<Boolean> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(SecureSolrZNodeZkCommand.class);
+  private static final Logger logger = LoggerFactory.getLogger(SecureSolrZNodeZkCommand.class);
 
   public SecureSolrZNodeZkCommand(int maxRetries, int interval) {
     super(maxRetries, interval);
@@ -46,9 +46,8 @@ public class SecureSolrZNodeZkCommand extends AbstractZookeeperRetryCommand<Bool
   @Override
   protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
     String zNode = client.getZnode();
-    List<ACL> newAclList = new ArrayList<>();
     List<ACL> saslUserList = AclUtils.createAclListFromSaslUsers(client.getSaslUsers().split(","));
-    newAclList.addAll(saslUserList);
+    List<ACL> newAclList = new ArrayList<>(saslUserList);
     newAclList.add(new ACL(ZooDefs.Perms.READ, new Id("world", "anyone")));
 
     String configsPath = String.format("%s/%s", zNode, "configs");
@@ -61,16 +60,15 @@ public class SecureSolrZNodeZkCommand extends AbstractZookeeperRetryCommand<Bool
 
     AclUtils.setRecursivelyOn(client.getSolrZkClient().getSolrZooKeeper(), zNode, newAclList, excludePaths);
 
-    List<ACL> commonConfigAcls = new ArrayList<>();
-    commonConfigAcls.addAll(saslUserList);
+    List<ACL> commonConfigAcls = new ArrayList<>(saslUserList);
     commonConfigAcls.add(new ACL(ZooDefs.Perms.READ | ZooDefs.Perms.CREATE, new Id("world", "anyone")));
 
-    LOG.info("Set sasl users for znode '{}' : {}", client.getZnode(), StringUtils.join(saslUserList, ","));
-    LOG.info("Skip {}/configs and {}/collections", client.getZnode(), client.getZnode());
+    logger.info("Set sasl users for znode '{}' : {}", client.getZnode(), StringUtils.join(saslUserList, ","));
+    logger.info("Skip {}/configs and {}/collections", client.getZnode(), client.getZnode());
     solrZooKeeper.setACL(configsPath, AclUtils.mergeAcls(solrZooKeeper.getACL(configsPath, new Stat()), commonConfigAcls), -1);
     solrZooKeeper.setACL(collectionsPath, AclUtils.mergeAcls(solrZooKeeper.getACL(collectionsPath, new Stat()), commonConfigAcls), -1);
 
-    LOG.info("Set world:anyone to 'cr' on  {}/configs and {}/collections", client.getZnode(), client.getZnode());
+    logger.info("Set world:anyone to 'cr' on  {}/configs and {}/collections", client.getZnode(), client.getZnode());
     AclUtils.setRecursivelyOn(solrZooKeeper, configsPath, saslUserList);
     AclUtils.setRecursivelyOn(solrZooKeeper, collectionsPath, saslUserList);
 
@@ -79,7 +77,7 @@ public class SecureSolrZNodeZkCommand extends AbstractZookeeperRetryCommand<Bool
 
   private void createZnodeIfNeeded(String configsPath, SolrZkClient zkClient) throws KeeperException, InterruptedException {
     if (!zkClient.exists(configsPath, true)) {
-      LOG.info("'{}' does not exist. Creating it ...", configsPath);
+      logger.info("'{}' does not exist. Creating it ...", configsPath);
       zkClient.makePath(configsPath, true);
     }
   }
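
The refactor applied twice in this file replaces an empty list plus addAll() with the ArrayList copy constructor. Both forms produce the same contents; a self-contained before/after sketch with illustrative data:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Demonstrates the copy-constructor refactor used above.
    public class CopyConstructorSketch {
      public static void main(String[] args) {
        List<String> saslUsers = Arrays.asList("solr", "infra-solr"); // illustrative users

        // before: two statements
        List<String> before = new ArrayList<>();
        before.addAll(saslUsers);

        // after: one expression, same contents
        List<String> after = new ArrayList<>(saslUsers);

        System.out.println(before.equals(after)); // true
      }
    }
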
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java
index 2b360fb..ec300b3 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java
@@ -18,6 +18,10 @@
  */
 package org.apache.ambari.infra.solr.commands;
 
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
 import org.apache.ambari.infra.solr.domain.AmbariSolrState;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -27,13 +31,9 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.nio.charset.StandardCharsets;
-import java.util.HashMap;
-import java.util.Map;
-
 public class UpdateStateFileZkCommand extends AbstractStateFileZkCommand {
 
-  private static final Logger LOG = LoggerFactory.getLogger(UpdateStateFileZkCommand.class);
+  private static final Logger logger = LoggerFactory.getLogger(UpdateStateFileZkCommand.class);
 
   private String unsecureZnode;
 
@@ -46,13 +46,13 @@ public class UpdateStateFileZkCommand extends AbstractStateFileZkCommand {
   protected AmbariSolrState executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
     boolean secure = client.isSecure();
     String stateFile = String.format("%s/%s", unsecureZnode, AbstractStateFileZkCommand.STATE_FILE);
-    AmbariSolrState result = null;
+    AmbariSolrState result;
     if (secure) {
-      LOG.info("Update state file in secure mode.");
+      logger.info("Update state file in secure mode.");
       updateStateFile(client, zkClient, AmbariSolrState.SECURE, stateFile);
       result = AmbariSolrState.SECURE;
     } else {
-      LOG.info("Update state file in unsecure mode.");
+      logger.info("Update state file in unsecure mode.");
       updateStateFile(client, zkClient, AmbariSolrState.UNSECURE, stateFile);
       result = AmbariSolrState.UNSECURE;
     }
@@ -62,15 +62,15 @@ public class UpdateStateFileZkCommand extends AbstractStateFileZkCommand {
   private void updateStateFile(AmbariSolrCloudClient client, SolrZkClient zkClient, AmbariSolrState stateToUpdate,
                                String stateFile) throws Exception {
     if (!zkClient.exists(stateFile, true)) {
-      LOG.info("State file does not exits. Initializing it as '{}'", stateToUpdate);
+      logger.info("State file does not exits. Initializing it as '{}'", stateToUpdate);
       zkClient.create(stateFile, createStateJson(stateToUpdate).getBytes(StandardCharsets.UTF_8),
         CreateMode.PERSISTENT, true);
     } else {
       AmbariSolrState stateOnSecure = getStateFromJson(client, stateFile);
       if (stateToUpdate.equals(stateOnSecure)) {
-        LOG.info("State file is in '{}' mode. No update.", stateOnSecure);
+        logger.info("State file is in '{}' mode. No update.", stateOnSecure);
       } else {
-        LOG.info("State file is in '{}' mode. Updating it to '{}'", stateOnSecure, stateToUpdate);
+        logger.info("State file is in '{}' mode. Updating it to '{}'", stateOnSecure, stateToUpdate);
         zkClient.setData(stateFile, createStateJson(stateToUpdate).getBytes(StandardCharsets.UTF_8), true);
       }
     }
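
The create-or-update flow above has three branches: create the state file if missing, skip the write if it already holds the target state, otherwise overwrite it. A pure-logic sketch of the same flow, using an in-memory map in place of ZooKeeper (all names here are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    // In-memory stand-in for the znode store; mirrors the branching above.
    public class StateFileSketch {
      enum State { SECURE, UNSECURE }

      private final Map<String, State> store = new HashMap<>();

      void updateStateFile(String stateFile, State stateToUpdate) {
        State current = store.get(stateFile);
        if (current == null) {
          System.out.printf("State file does not exist. Initializing it as '%s'%n", stateToUpdate);
          store.put(stateFile, stateToUpdate);
        } else if (current == stateToUpdate) {
          System.out.printf("State file is in '%s' mode. No update.%n", current);
        } else {
          System.out.printf("State file is in '%s' mode. Updating it to '%s'%n", current, stateToUpdate);
          store.put(stateFile, stateToUpdate);
        }
      }

      public static void main(String[] args) {
        StateFileSketch sketch = new StateFileSketch();
        sketch.updateStateFile("/infra/state.json", State.SECURE);   // creates
        sketch.updateStateFile("/infra/state.json", State.SECURE);   // no-op
        sketch.updateStateFile("/infra/state.json", State.UNSECURE); // updates
      }
    }
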
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java
index f46565b..d555995 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java
@@ -18,20 +18,19 @@
  */
 package org.apache.ambari.infra.solr.util;
 
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 public class ShardUtils {
 
-  private static final Logger LOG = LoggerFactory.getLogger(ShardUtils.class);
+  private static final Logger logger = LoggerFactory.getLogger(ShardUtils.class);
 
   public static String generateShardListStr(int maxShardsPerNode) {
     String shardsListStr = "";
@@ -54,15 +53,13 @@ public class ShardUtils {
   }
 
   public static Collection<String> getShardNamesFromSlices(Collection<Slice> slices, String collection) {
-    Collection<String> result = new HashSet<String>();
-    Iterator<Slice> iter = slices.iterator();
-    while (iter.hasNext()) {
-      Slice slice = iter.next();
+    Collection<String> result = new HashSet<>();
+    for (Slice slice : slices) {
       for (Replica replica : slice.getReplicas()) {
-        LOG.info("collectionName=" + collection + ", slice.name="
-          + slice.getName() + ", slice.state=" + slice.getState()
-          + ", replica.core=" + replica.getStr("core")
-          + ", replica.state=" + replica.getStr("state"));
+        logger.info("collectionName=" + collection + ", slice.name="
+                + slice.getName() + ", slice.state=" + slice.getState()
+                + ", replica.core=" + replica.getStr("core")
+                + ", replica.state=" + replica.getStr("state"));
         result.add(slice.getName());
       }
     }
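
The loop cleanup above swaps an explicit Iterator for the equivalent enhanced for loop, and the raw HashSet<String> for the diamond operator. A side-by-side sketch showing the two forms collect identical results (illustrative data, not real slice objects):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.List;

    // Before/after of the Iterator-to-for-each refactor applied above.
    public class LoopRefactorSketch {
      public static void main(String[] args) {
        List<String> sliceNames = Arrays.asList("shard1", "shard2", "shard1");

        Collection<String> withIterator = new HashSet<>();
        Iterator<String> iter = sliceNames.iterator();
        while (iter.hasNext()) {
          withIterator.add(iter.next());
        }

        Collection<String> withForEach = new HashSet<>();
        for (String name : sliceNames) {
          withForEach.add(name);
        }

        System.out.println(withIterator.equals(withForEach)); // true
      }
    }
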
diff --git a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java
index 525c419..510dc09 100644
--- a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java
+++ b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java
@@ -27,7 +27,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class MetricsUtils {
-  private static final Logger LOG = LoggerFactory.getLogger(MetricsUtils.class);
+  private static final Logger logger = LoggerFactory.getLogger(MetricsUtils.class);
   private static final String APPID = "ambari-infra-solr";
   public static final String NAME_PREFIX = "infra.";
 
@@ -41,15 +41,15 @@ public class MetricsUtils {
       String ipHostName = ip.getHostName();
       String canonicalHostName = ip.getCanonicalHostName();
       if (!canonicalHostName.equalsIgnoreCase(ipAddress)) {
-        LOG.info("Using InetAddress.getCanonicalHostName()={}", canonicalHostName);
+        logger.info("Using InetAddress.getCanonicalHostName()={}", canonicalHostName);
         hostName = canonicalHostName;
       } else {
-        LOG.info("Using InetAddress.getHostName()={}", ipHostName);
+        logger.info("Using InetAddress.getHostName()={}", ipHostName);
         hostName = ipHostName;
       }
-      LOG.info("ipAddress={}, ipHostName={}, canonicalHostName={}, hostName={}", ipAddress, ipHostName, canonicalHostName, hostName);
+      logger.info("ipAddress={}, ipHostName={}, canonicalHostName={}, hostName={}", ipAddress, ipHostName, canonicalHostName, hostName);
     } catch (UnknownHostException e) {
-      LOG.error("Error getting hostname.", e);
+      logger.error("Error getting hostname.", e);
     }
 
     return hostName;
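
The method above prefers InetAddress.getCanonicalHostName() and falls back to getHostName() when reverse lookup only echoes the raw IP. A standalone sketch of the same fallback (the "localhost" default is an assumption for illustration; the real method's default is not shown in this hunk):

    import java.net.InetAddress;
    import java.net.UnknownHostException;

    // Hostname-resolution fallback mirroring the logic above.
    public class HostNameSketch {
      public static void main(String[] args) {
        String hostName = "localhost"; // assumed default when resolution fails
        try {
          InetAddress ip = InetAddress.getLocalHost();
          String ipAddress = ip.getHostAddress();
          String canonical = ip.getCanonicalHostName();
          // prefer the canonical name unless it is just the IP echoed back
          hostName = canonical.equalsIgnoreCase(ipAddress) ? ip.getHostName() : canonical;
        } catch (UnknownHostException e) {
          System.err.println("Error getting hostname: " + e.getMessage());
        }
        System.out.println(hostName);
      }
    }
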
diff --git a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java
index 9a837fa..9ebe77b 100644
--- a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java
+++ b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java
@@ -46,7 +46,7 @@ import com.codahale.metrics.Timer;
 
 public class ScheduledAMSReporter<T> extends ScheduledReporter {
 
-  private static final Logger LOG = LoggerFactory.getLogger(ScheduledAMSReporter.class);
+  private static final Logger logger = LoggerFactory.getLogger(ScheduledAMSReporter.class);
 
   private final SolrMetricsSink amsClient;
   private final String namePrefix;
@@ -92,7 +92,7 @@ public class ScheduledAMSReporter<T> extends ScheduledReporter {
       amsClient.emitMetrics(timelineMetrics);
     }
     catch (Exception ex) {
-      LOG.error("Unable to collect and send metrics", ex);
+      logger.error("Unable to collect and send metrics", ex);
     }
   }
 
@@ -100,7 +100,7 @@ public class ScheduledAMSReporter<T> extends ScheduledReporter {
     try {
       timelineMetricList.addAll(gaugeConverter.convert(metricName, gauge, currentMillis));
     } catch (Exception ex) {
-      LOG.error("Unable to get value of gauge metric " + metricName, ex);
+      logger.error("Unable to get value of gauge metric " + metricName, ex);
     }
   }
 
@@ -111,7 +111,7 @@ public class ScheduledAMSReporter<T> extends ScheduledReporter {
       timelineMetricList.add(toTimelineMetric(metricName + ".avgTimePerRequest", snapshot.getMean(), currentTime));
       timelineMetricList.add(toTimelineMetric(metricName + ".medianRequestTime", snapshot.getMedian(), currentTime));
     } catch (Exception ex) {
-      LOG.error("Unable to get value of timer metric " + metricName, ex);
+      logger.error("Unable to get value of timer metric " + metricName, ex);
     }
   }
 }
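
Note the error-isolation structure this reporter uses: each gauge and timer is read inside its own try/catch, so one failing metric is logged and skipped rather than aborting the whole reporting cycle. A minimal sketch of that structure with hypothetical metric names (not the real AMS types):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;
    import java.util.function.Supplier;

    // Per-metric error isolation: a broken gauge cannot abort the report.
    public class GuardedReportSketch {
      static void report(Map<String, Supplier<Double>> gauges) {
        List<String> collected = new ArrayList<>();
        for (Map.Entry<String, Supplier<Double>> e : gauges.entrySet()) {
          try {
            collected.add(e.getKey() + "=" + e.getValue().get());
          } catch (Exception ex) {
            System.err.println("Unable to get value of gauge metric " + e.getKey() + ": " + ex);
          }
        }
        System.out.println(collected);
      }

      public static void main(String[] args) {
        Map<String, Supplier<Double>> gauges = new TreeMap<>();
        gauges.put("infra.heap.used", () -> 42.0);
        gauges.put("infra.broken", () -> { throw new IllegalStateException("boom"); });
        report(gauges); // prints [infra.heap.used=42.0]; the broken gauge is logged and skipped
      }
    }
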
diff --git a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java
index 201c797..9236d5d 100644
--- a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java
+++ b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java
@@ -30,14 +30,14 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class SolrMetricsSink extends AbstractTimelineMetricsSink {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrMetricsSink.class);
+  private static final Logger logger = LoggerFactory.getLogger(SolrMetricsSink.class);
 
   private final Collection<String> collectorHosts;
   private final int port;
   private final AMSProtocol protocol;
 
   public SolrMetricsSink(String[] collectorHosts, int port, AMSProtocol protocol, SolrMetricsSecurityConfig securityConfig) {
-    LOG.info("Setting up SolrMetricsSink protocol={} hosts={} port={}", protocol.name(), join(collectorHosts, ","), port);
+    logger.info("Setting up SolrMetricsSink protocol={} hosts={} port={}", protocol.name(), join(collectorHosts, ","), port);
     this.collectorHosts = asList(collectorHosts);
     this.port = port;
     this.protocol = protocol;