Posted to commits@ambari.apache.org by GitBox <gi...@apache.org> on 2018/11/15 19:04:21 UTC

[GitHub] kasakrisz closed pull request #15: AMBARI-24895 - Infra Manager: code clean up

URL: https://github.com/apache/ambari-infra/pull/15

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
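In brief, the patch (a) replaces the SLF4J logging facade with the Log4j 2
API throughout ambari-infra-manager and its integration tests, renaming the
LOG constants to logger, (b) renames ArchivingParameters and SolrParameters
to ArchivingProperties and SolrProperties, folding the former
DocumentArchivingProperties defaults class into ArchivingProperties, and
(c) adds log4j-api and log4j-core dependencies to ambari-infra-manager-it.
A minimal sketch of the logger migration pattern repeated across the files
below (the Example class is illustrative, not part of the PR):

  // Before this PR (SLF4J facade):
  //   import org.slf4j.Logger;
  //   import org.slf4j.LoggerFactory;
  //   ...
  //   private static final Logger LOG = LoggerFactory.getLogger(Example.class);

  // After this PR (Log4j 2 API):
  import org.apache.logging.log4j.LogManager;
  import org.apache.logging.log4j.Logger;

  public class Example {
    private static final Logger logger = LogManager.getLogger(Example.class);

    public static void main(String[] args) {
      // The {} placeholder syntax is the same in SLF4J and Log4j 2, so the
      // existing log statements only needed the LOG -> logger rename.
      logger.info("Exec command: {}", String.join(" ", args));
    }
  }

The consolidation of the defaults class is visible in the
ArchivingProperties diff further down, where merge(JobParameters) moved in
from the deleted DocumentArchivingProperties.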

diff --git a/ambari-infra-manager-it/pom.xml b/ambari-infra-manager-it/pom.xml
index 68d43529..f1f296bc 100644
--- a/ambari-infra-manager-it/pom.xml
+++ b/ambari-infra-manager-it/pom.xml
@@ -157,6 +157,16 @@
       <artifactId>hadoop-hdfs-client</artifactId>
       <version>${hadoop.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <version>2.11.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <version>2.11.1</version>
+    </dependency>
   </dependencies>
 
   <build>
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
index ddc4f000..fdccbab9 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
@@ -39,8 +39,8 @@
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
 import org.apache.http.impl.client.HttpClientBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.type.TypeReference;
@@ -49,7 +49,7 @@
 
 // TODO: use swagger
 public class InfraClient implements AutoCloseable {
-  private static final Logger LOG = LoggerFactory.getLogger(InfraClient.class);
+  private static final Logger logger = LogManager.getLogger(InfraClient.class);
 
   private final CloseableHttpClient httpClient;
   private final URI baseUrl;
@@ -77,7 +77,7 @@ private HttpResponse execute(HttpRequestBase post) {
     try (CloseableHttpResponse response = httpClient.execute(post)) {
       String responseBodyText = IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset());
       int statusCode = response.getStatusLine().getStatusCode();
-      LOG.info("Response code {} body {} ", statusCode, responseBodyText);
+      logger.info("Response code {} body {} ", statusCode, responseBodyText);
       if (!(200 <= statusCode && statusCode <= 299))
         throw new RuntimeException("Error while executing http request: " + responseBodyText);
       return new HttpResponse(statusCode, responseBodyText);
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
index 0dcc91a9..7bc952ae 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
@@ -31,17 +31,17 @@
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
 import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrInputDocument;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class Solr {
-  private static final Logger LOG = LoggerFactory.getLogger(Solr.class);
+  private static final Logger logger = LogManager.getLogger(Solr.class);
   public static final String AUDIT_LOGS_COLLECTION = "audit_logs";
   public static final String HADOOP_LOGS_COLLECTION = "hadoop_logs";
   private static final int SOLR_PORT = 8983;
@@ -85,7 +85,7 @@ public void add(SolrInputDocument solrInputDocument) {
   }
 
   public void createSolrCollection(String collectionName) {
-    LOG.info("Creating collection");
+    logger.info("Creating collection");
     runCommand(new String[]{"docker", "exec", "docker_solr_1", "solr", "create_collection", "-force", "-c", collectionName, "-d", Paths.get(configSetPath, "configsets", collectionName, "conf").toString(), "-n", collectionName + "_conf"});
   }
 
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
index f48e1073..8f19ce98 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
@@ -25,11 +25,11 @@
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class TestUtil {
-  private static final Logger LOG = LoggerFactory.getLogger(TestUtil.class);
+  private static final Logger logger = LogManager.getLogger(TestUtil.class);
 
   public static void doWithin(int sec, String actionName, BooleanSupplier predicate) {
     doWithin(sec, actionName, () -> {
@@ -54,7 +54,7 @@ public static void doWithin(int sec, String actionName, Runnable runnable) {
         throw new AssertionError(String.format("Unable to perform action '%s' within %d seconds", actionName, sec), exception);
       }
       else {
-        LOG.info("Performing action '{}' failed. retrying...", actionName);
+        logger.info("Performing action '{}' failed. retrying...", actionName);
       }
       try {
         Thread.sleep(1000);
@@ -71,10 +71,10 @@ public static String getDockerHost() {
 
   public static void runCommand(String[] command) {
     try {
-      LOG.info("Exec command: {}", StringUtils.join(command, " "));
+      logger.info("Exec command: {}", StringUtils.join(command, " "));
       Process process = Runtime.getRuntime().exec(command);
       String stdout = IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8);
-      LOG.info("Exec command result {}", stdout);
+      logger.info("Exec command result {}", stdout);
     } catch (Exception e) {
       throw new RuntimeException("Error during execute shell command: ", e);
     }
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
index 3016d67c..c400aeeb 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
@@ -30,14 +30,14 @@
 import java.util.Set;
 
 import org.apache.ambari.infra.Solr;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class MetricsIT {
-  private static final Logger LOG = LoggerFactory.getLogger(MetricsIT.class);
+  private static final Logger logger = LogManager.getLogger(MetricsIT.class);
 
   private static MockMetricsServer metricsServer;
   private static String shellScriptLocation;
@@ -49,7 +49,7 @@ public static void setupMetricsServer() throws Exception {
 
     // TODO: use the same containers as ambari-infra-manager-it
     shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr-docker-compose.sh";
-    LOG.info("Creating new docker containers for testing Ambari Infra Solr Metrics plugin ...");
+    logger.info("Creating new docker containers for testing Ambari Infra Solr Metrics plugin ...");
     runCommand(new String[]{shellScriptLocation, "start"});
 
     Solr solr = new Solr("/usr/lib/ambari-infra-solr/server/solr");
@@ -61,8 +61,8 @@ public static void setupMetricsServer() throws Exception {
   }
 
   @AfterClass
-  public static void tearDown() throws Exception {
-    LOG.info("shutdown containers");
+  public static void tearDown() {
+    logger.info("shutdown containers");
     runCommand(new String[]{shellScriptLocation, "stop"});
   }
 
@@ -74,10 +74,10 @@ public void testAllMetricsArrived() throws Exception {
       Thread.sleep(1000);
       if (currentTimeMillis() - start > 30 * 1000)
         break;
-      LOG.info("Checking any metrics arrived...");
+      logger.info("Checking any metrics arrived...");
     }
 
-    metricsServer.getNotReceivedMetrics().forEach(metric -> LOG.info("Metric not received: {}", metric));
+    metricsServer.getNotReceivedMetrics().forEach(metric -> logger.info("Metric not received: {}", metric));
     assertThat(metricsServer.getNotReceivedMetrics().isEmpty(), is(true));
   }
 
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
index 9d2734fc..71974469 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
@@ -27,8 +27,8 @@
 import java.util.Set;
 import java.util.concurrent.ConcurrentSkipListSet;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.google.gson.Gson;
 
@@ -37,7 +37,7 @@
 import spark.servlet.SparkApplication;
 
 public class MockMetricsServer implements SparkApplication {
-  private static final Logger LOG = LoggerFactory.getLogger(MockMetricsServer.class);
+  private static final Logger logger = LogManager.getLogger(MockMetricsServer.class);
   private static final String HOST_NAME = "metrics_collector";
 
   private Set<String> expectedMetrics;
@@ -51,14 +51,14 @@ public void init() {
   }
 
   private Object queryState(Request request, Response response) {
-    LOG.info("Sending hostname {}", HOST_NAME);
+    logger.info("Sending hostname {}", HOST_NAME);
     response.type("application/json");
     return new Gson().toJson(singletonList(HOST_NAME));
   }
 
   private Object logBody(Request req, Response resp) {
     String body = req.body();
-    LOG.info("Incoming metrics {}", body);
+    logger.info("Incoming metrics {}", body);
 
     expectedMetrics.removeIf(body::contains);
 
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
index 7a748bc1..6ac37099 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
@@ -40,14 +40,14 @@
 import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.common.SolrInputDocument;
 import org.jbehave.core.annotations.AfterStories;
 import org.jbehave.core.annotations.BeforeStories;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public abstract class AbstractInfraSteps {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractInfraSteps.class);
+  private static final Logger logger = LogManager.getLogger(AbstractInfraSteps.class);
 
   private static final int INFRA_MANAGER_PORT = 61890;
   private static final int FAKE_S3_PORT = 4569;
@@ -85,12 +85,12 @@ public void initDockerContainer() throws Exception {
 
     String localDataFolder = getLocalDataFolder();
     if (new File(localDataFolder).exists()) {
-      LOG.info("Clean local data folder {}", localDataFolder);
+      logger.info("Clean local data folder {}", localDataFolder);
       FileUtils.cleanDirectory(new File(localDataFolder));
     }
 
     shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh";
-    LOG.info("Create new docker container for testing Ambari Infra Manager ...");
+    logger.info("Create new docker container for testing Ambari Infra Manager ...");
     runCommand(new String[]{shellScriptLocation, "start"});
 
     dockerHost = getDockerHost();
@@ -101,7 +101,7 @@ public void initDockerContainer() throws Exception {
     solr.createSolrCollection(AUDIT_LOGS_COLLECTION);
     solr.createSolrCollection(HADOOP_LOGS_COLLECTION);
 
-    LOG.info("Initializing s3 client");
+    logger.info("Initializing s3 client");
     s3client = new S3Client(dockerHost, FAKE_S3_PORT, S3_BUCKET_NAME);
 
     checkInfraManagerReachable();
@@ -110,7 +110,7 @@ public void initDockerContainer() throws Exception {
   private void checkInfraManagerReachable() throws Exception {
     try (InfraClient httpClient = getInfraClient()) {
       doWithin(30, "Start Ambari Infra Manager", httpClient::getJobs);
-      LOG.info("Ambari Infra Manager is up and running");
+      logger.info("Ambari Infra Manager is up and running");
     }
   }
 
@@ -155,21 +155,21 @@ protected SolrInputDocument addDocument(OffsetDateTime logtime) {
   public void shutdownContainers() throws Exception {
     Thread.sleep(2000); // sync with s3 server
     List<String> objectKeys = getS3client().listObjectKeys();
-    LOG.info("Found {} files on s3.", objectKeys.size());
-    objectKeys.forEach(objectKey ->  LOG.info("Found file on s3 with key {}", objectKey));
+    logger.info("Found {} files on s3.", objectKeys.size());
+    objectKeys.forEach(objectKey ->  logger.info("Found file on s3 with key {}", objectKey));
 
-    LOG.info("Listing files on hdfs.");
+    logger.info("Listing files on hdfs.");
     try (FileSystem fileSystem = getHdfs()) {
       int count = 0;
       RemoteIterator<LocatedFileStatus> it = fileSystem.listFiles(new Path("/test_audit_logs"), true);
       while (it.hasNext()) {
-        LOG.info("Found file on hdfs with name {}", it.next().getPath().getName());
+        logger.info("Found file on hdfs with name {}", it.next().getPath().getName());
         ++count;
       }
-      LOG.info("{} files found on hfds", count);
+      logger.info("{} files found on hfds", count);
     }
 
-    LOG.info("shutdown containers");
+    logger.info("shutdown containers");
     runCommand(new String[]{shellScriptLocation, "stop"});
   }
 
diff --git a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
index e2bbe9db..ff96c9a1 100644
--- a/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
+++ b/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
@@ -49,18 +49,18 @@
 import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.jbehave.core.annotations.Given;
 import org.jbehave.core.annotations.Then;
 import org.jbehave.core.annotations.When;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class ExportJobsSteps extends AbstractInfraSteps {
-  private static final Logger LOG = LoggerFactory.getLogger(ExportJobsSteps.class);
+  private static final Logger logger = LogManager.getLogger(ExportJobsSteps.class);
   private Set<String> documentIds = new HashSet<>();
 
   private Map<String, JobExecutionInfo> launchedJobs = new HashMap<>();
@@ -101,7 +101,7 @@ public void startJob(String jobName, String parameters, int waitSec) throws Exce
     Thread.sleep(waitSec * 1000);
     try (InfraClient httpClient = getInfraClient()) {
       JobExecutionInfo jobExecutionInfo = httpClient.startJob(jobName, parameters);
-      LOG.info("Job {} started: {}", jobName, jobExecutionInfo);
+      logger.info("Job {} started: {}", jobName, jobExecutionInfo);
       launchedJobs.put(jobName, jobExecutionInfo);
     }
   }
@@ -208,7 +208,7 @@ public void checkNumberOfFilesOnHdfs(int count, String text, String path, int wa
   @Then("Check $count files exists on local filesystem with filenames containing the text $text in the folder $path for job $jobName")
   public void checkNumberOfFilesOnLocalFilesystem(long count, String text, String path, String jobName) {
     File destinationDirectory = new File(getLocalDataFolder(), path.replace("${jobId}", launchedJobs.get(jobName).getJobId()));
-    LOG.info("Destination directory path: {}", destinationDirectory.getAbsolutePath());
+    logger.info("Destination directory path: {}", destinationDirectory.getAbsolutePath());
     doWithin(5, "Destination directory exists", destinationDirectory::exists);
 
     File[] files = requireNonNull(destinationDirectory.listFiles(),
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
index 9e1a17f8..957a45d8 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
@@ -18,16 +18,16 @@
  */
 package org.apache.ambari.infra.conf.security;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static org.apache.commons.lang.StringUtils.isBlank;
+import static org.apache.commons.lang3.ArrayUtils.isNotEmpty;
 
 import java.util.Optional;
 
-import static org.apache.commons.lang.StringUtils.isBlank;
-import static org.apache.commons.lang3.ArrayUtils.isNotEmpty;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class HadoopCredentialStore implements PasswordStore {
-  private static final Logger LOG = LoggerFactory.getLogger(InfraManagerSecurityConfig.class);
+  private static final Logger logger = LogManager.getLogger(InfraManagerSecurityConfig.class);
   public static final String CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY = "hadoop.security.credential.provider.path";
 
   private final String credentialStoreProviderPath;
@@ -48,7 +48,7 @@ public HadoopCredentialStore(String credentialStoreProviderPath) {
       char[] passwordChars = config.getPassword(propertyName);
       return (isNotEmpty(passwordChars)) ? Optional.of(new String(passwordChars)) : Optional.empty();
     } catch (Exception e) {
-      LOG.warn("Could not load password {} from credential store.", propertyName);
+      logger.warn("Could not load password {} from credential store.", propertyName);
       return Optional.empty();
     }
   }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
index 5d525fac..5ababcc1 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
@@ -25,8 +25,8 @@
 import javax.inject.Inject;
 import javax.inject.Named;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.swagger.jaxrs.config.BeanConfig;
 import io.swagger.models.Swagger;
@@ -36,7 +36,7 @@
 @Named
 public class InfraManagerApiDocStorage {
 
-  private static final Logger LOG = LoggerFactory.getLogger(InfraManagerApiDocStorage.class);
+  private static final Logger logger = LogManager.getLogger(InfraManagerApiDocStorage.class);
 
   private final Map<String, Object> swaggerMap = new ConcurrentHashMap<>();
 
@@ -48,7 +48,7 @@ private void postConstruct() {
     Thread loadApiDocThread = new Thread("load_swagger_api_doc") {
       @Override
       public void run() {
-        LOG.info("Start thread to scan REST API doc from endpoints.");
+        logger.info("Start thread to scan REST API doc from endpoints.");
         Swagger swagger = beanConfig.getSwagger();
         swagger.addSecurityDefinition("basicAuth", new BasicAuthDefinition());
         beanConfig.configure(swagger);
@@ -66,7 +66,7 @@ public void run() {
         } catch (Exception e) {
           e.printStackTrace();
         }
-        LOG.info("Scanning REST API endpoints and generating docs has been successful.");
+        logger.info("Scanning REST API endpoints and generating docs has been successful.");
       }
     };
     loadApiDocThread.setDaemon(true);
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
index 314e52ed..8806cf07 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
@@ -22,8 +22,8 @@
 
 import javax.annotation.PostConstruct;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
 import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
@@ -32,7 +32,7 @@
 import org.springframework.context.event.EventListener;
 
 public abstract class AbstractJobsConfiguration<TProperties extends JobProperties<TParameters>, TParameters extends Validatable> {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractJobsConfiguration.class);
+  private static final Logger logger = LogManager.getLogger(AbstractJobsConfiguration.class);
 
   private final Map<String, TProperties> propertyMap;
   private final JobScheduler scheduler;
@@ -56,13 +56,13 @@ public void registerJobs() {
             .forEach(jobName -> {
               try {
                 propertyMap.get(jobName).validate(jobName);
-                LOG.info("Registering job {}", jobName);
+                logger.info("Registering job {}", jobName);
                 JobBuilder jobBuilder = jobs.get(jobName).listener(new JobsPropertyMap<>(propertyMap));
                 Job job = buildJob(jobBuilder);
                 jobRegistryBeanPostProcessor.postProcessAfterInitialization(job, jobName);
               }
               catch (Exception e) {
-                LOG.warn("Unable to register job " + jobName, e);
+                logger.warn("Unable to register job " + jobName, e);
                 propertyMap.get(jobName).setEnabled(false);
               }
             });
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
index 7d77fbd6..4edfae92 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
@@ -24,8 +24,8 @@
 import javax.inject.Named;
 
 import org.apache.ambari.infra.manager.Jobs;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobParametersBuilder;
@@ -40,7 +40,7 @@
 
 @Named
 public class JobScheduler {
-  private static final Logger LOG = LoggerFactory.getLogger(JobScheduler.class);
+  private static final Logger logger = LogManager.getLogger(JobScheduler.class);
 
   private final TaskScheduler scheduler;
   private final Jobs jobs;
@@ -59,7 +59,7 @@ public void schedule(String jobName, SchedulingProperties schedulingProperties)
     }
 
     scheduler.schedule(() -> launchJob(jobName), new CronTrigger(schedulingProperties.getCron()));
-    LOG.info("Job {} scheduled for running. Cron: {}", jobName, schedulingProperties.getCron());
+    logger.info("Job {} scheduled for running. Cron: {}", jobName, schedulingProperties.getCron());
   }
 
   private void restartIfFailed(JobExecution jobExecution) {
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
index e6f2c2f1..5569cf00 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
@@ -25,14 +25,14 @@
 import java.util.Optional;
 import java.util.stream.Collectors;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.zookeeper.client.ConnectStringParser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public abstract class SolrDAOBase {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrDAOBase.class);
+  private static final Logger logger = LogManager.getLogger(SolrDAOBase.class);
 
   private final String zooKeeperConnectionString;
   private final String defaultCollection;
@@ -45,14 +45,14 @@ protected SolrDAOBase(String zooKeeperConnectionString, String defaultCollection
   protected void delete(String deleteQueryText) {
     try (CloudSolrClient client = createClient()) {
       try {
-        LOG.info("Executing solr delete by query {}", deleteQueryText);
+        logger.info("Executing solr delete by query {}", deleteQueryText);
         client.deleteByQuery(deleteQueryText);
         client.commit();
       } catch (Exception e) {
         try {
           client.rollback();
         } catch (SolrServerException e1) {
-          LOG.warn("Unable to rollback after solr delete operation failure.", e1);
+          logger.warn("Unable to rollback after solr delete operation failure.", e1);
         }
         throw new RuntimeException(e);
       }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
index 3df18b69..3f89ec87 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
@@ -18,19 +18,19 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.File;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
 public abstract class AbstractFileAction implements FileAction {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractFileAction.class);
+  private static final Logger logger = LogManager.getLogger(AbstractFileAction.class);
 
   @Override
   public File perform(File inputFile) {
     File outputFile =  onPerform(inputFile);
     if (!inputFile.delete())
-      LOG.warn("File {} was not deleted. Exists: {}", inputFile.getAbsolutePath(), inputFile.exists());
+      logger.warn("File {} was not deleted. Exists: {}", inputFile.getAbsolutePath(), inputFile.exists());
     return outputFile;
   }
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingParameters.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
similarity index 72%
rename from ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingParameters.java
rename to ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
index 5c783d6e..3ad3926f 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingParameters.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ArchivingProperties.java
@@ -22,29 +22,33 @@
 import static org.apache.ambari.infra.job.archive.ExportDestination.HDFS;
 import static org.apache.ambari.infra.job.archive.ExportDestination.LOCAL;
 import static org.apache.ambari.infra.job.archive.ExportDestination.S3;
+import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
+import static org.apache.ambari.infra.json.StringToFsPermissionConverter.toFsPermission;
 import static org.apache.commons.lang.StringUtils.isBlank;
 
 import java.time.Duration;
 import java.util.Optional;
 
+import org.apache.ambari.infra.job.JobProperties;
 import org.apache.ambari.infra.job.Validatable;
 import org.apache.ambari.infra.json.DurationToStringConverter;
 import org.apache.ambari.infra.json.FsPermissionToStringConverter;
 import org.apache.ambari.infra.json.StringToDurationConverter;
 import org.apache.ambari.infra.json.StringToFsPermissionConverter;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.springframework.batch.core.JobParameters;
 
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-public class ArchivingParameters implements Validatable {
+public class ArchivingProperties extends JobProperties<ArchivingProperties> implements Validatable {
   private int readBlockSize;
   private int writeBlockSize;
   private ExportDestination destination;
   private String localDestinationDirectory;
   private String fileNameSuffixColumn;
   private String fileNameSuffixDateFormat;
-  private SolrParameters solr;
+  private SolrProperties solr;
   private String s3AccessFile;
   private String s3KeyPrefix;
   private String s3BucketName;
@@ -110,11 +114,11 @@ public void setFileNameSuffixDateFormat(String fileNameSuffixDateFormat) {
     this.fileNameSuffixDateFormat = fileNameSuffixDateFormat;
   }
 
-  public SolrParameters getSolr() {
+  public SolrProperties getSolr() {
     return solr;
   }
 
-  public void setSolr(SolrParameters solr) {
+  public void setSolr(SolrProperties solr) {
     this.solr = solr;
   }
 
@@ -272,4 +276,36 @@ public void validate() {
     requireNonNull(solr, "No solr query was specified for archiving job!");
     solr.validate();
   }
+
+  @Override
+  public ArchivingProperties merge(JobParameters jobParameters) {
+    ArchivingProperties archivingProperties = new ArchivingProperties();
+    archivingProperties.setReadBlockSize(getIntJobParameter(jobParameters, "readBlockSize", readBlockSize));
+    archivingProperties.setWriteBlockSize(getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize));
+    archivingProperties.setDestination(ExportDestination.valueOf(jobParameters.getString("destination", destination.name())));
+    archivingProperties.setLocalDestinationDirectory(jobParameters.getString("localDestinationDirectory", localDestinationDirectory));
+    archivingProperties.setFileNameSuffixColumn(jobParameters.getString("fileNameSuffixColumn", fileNameSuffixColumn));
+    archivingProperties.setFileNameSuffixDateFormat(jobParameters.getString("fileNameSuffixDateFormat", fileNameSuffixDateFormat));
+    archivingProperties.setS3AccessFile(jobParameters.getString("s3AccessFile", s3AccessFile));
+    archivingProperties.setS3BucketName(jobParameters.getString("s3BucketName", s3BucketName));
+    archivingProperties.setS3KeyPrefix(jobParameters.getString("s3KeyPrefix", s3KeyPrefix));
+    archivingProperties.setS3Endpoint(jobParameters.getString("s3Endpoint", s3Endpoint));
+    archivingProperties.setHdfsEndpoint(jobParameters.getString("hdfsEndpoint", hdfsEndpoint));
+    archivingProperties.setHdfsDestinationDirectory(jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory));
+    archivingProperties.setHdfsFilePermission(toFsPermission(jobParameters.getString("hdfsFilePermission", FsPermissionToStringConverter.toString(hdfsFilePermission))));
+    archivingProperties.setHdfsKerberosPrincipal(jobParameters.getString("hdfsKerberosPrincipal", hdfsKerberosPrincipal));
+    archivingProperties.setHdfsKerberosKeytabPath(jobParameters.getString("hdfsKerberosKeytabPath", hdfsKerberosKeytabPath));
+    archivingProperties.setSolr(solr.merge(jobParameters));
+    archivingProperties.setStart(jobParameters.getString("start"));
+    archivingProperties.setEnd(jobParameters.getString("end"));
+    archivingProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return archivingProperties;
+  }
+
+  private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) {
+    String valueText = jobParameters.getString(parameterName);
+    if (isBlank(valueText))
+      return defaultValue;
+    return Integer.parseInt(valueText);
+  }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
index af522d3b..319cc5b1 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
@@ -32,8 +32,9 @@
 import org.apache.ambari.infra.job.JobContextRepository;
 import org.apache.ambari.infra.job.JobScheduler;
 import org.apache.ambari.infra.job.ObjectSource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.Step;
 import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
@@ -48,8 +49,8 @@
 import org.springframework.context.annotation.Configuration;
 
 @Configuration
-public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<DocumentArchivingProperties, ArchivingParameters> {
-  private static final Logger LOG = LoggerFactory.getLogger(DocumentArchivingConfiguration.class);
+public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<ArchivingProperties, ArchivingProperties> {
+  private static final Logger logger = LogManager.getLogger(DocumentArchivingConfiguration.class);
   private static final DocumentWiper NOT_DELETE = (firstDocument, lastDocument) -> { };
 
   private final StepBuilderFactory steps;
@@ -85,7 +86,7 @@ public Step exportStep(DocumentExporter documentExporter) {
   @StepScope
   public DocumentExporter documentExporter(DocumentItemReader documentItemReader,
                                            @Value("#{stepExecution.jobExecution.jobId}") String jobId,
-                                           @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters,
+                                           @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
                                            InfraManagerDataConfig infraManagerDataConfig,
                                            @Value("#{jobParameters[end]}") String intervalEnd,
                                            DocumentWiper documentWiper,
@@ -118,10 +119,10 @@ public DocumentExporter documentExporter(DocumentItemReader documentItemReader,
                     parameters.getSolr().getCollection(),
                     jobId,
                     isBlank(intervalEnd) ? "" : fileNameSuffixFormatter.format(intervalEnd)));
-    LOG.info("Destination directory path={}", destinationDirectory);
+    logger.info("Destination directory path={}", destinationDirectory);
     if (!destinationDirectory.exists()) {
       if (!destinationDirectory.mkdirs()) {
-        LOG.warn("Unable to create directory {}", destinationDirectory);
+        logger.warn("Unable to create directory {}", destinationDirectory);
       }
     }
 
@@ -134,7 +135,7 @@ public DocumentExporter documentExporter(DocumentItemReader documentItemReader,
 
   @Bean
   @StepScope
-  public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters,
+  public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
                                      SolrDAO solrDAO) {
     if (isBlank(parameters.getSolr().getDeleteQueryText()))
       return NOT_DELETE;
@@ -143,26 +144,26 @@ public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executio
 
   @Bean
   @StepScope
-  public SolrDAO solrDAO(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters) {
+  public SolrDAO solrDAO(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters) {
     return new SolrDAO(parameters.getSolr());
   }
 
   private File outFile(String collection, File directoryPath, String suffix) {
     File file = new File(directoryPath, String.format("%s_-_%s.json", collection, suffix));
-    LOG.info("Exporting to temp file {}", file.getAbsolutePath());
+    logger.info("Exporting to temp file {}", file.getAbsolutePath());
     return file;
   }
 
   @Bean
   @StepScope
   public DocumentItemReader reader(ObjectSource<Document> documentSource,
-                                   @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters properties) {
+                                   @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties properties) {
     return new DocumentItemReader(documentSource, properties.getReadBlockSize());
   }
 
   @Bean
   @StepScope
-  public ObjectSource<Document> documentSource(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters,
+  public ObjectSource<Document> documentSource(@Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") ArchivingProperties parameters,
                                                SolrDAO solrDAO) {
 
     return new SolrDocumentSource(solrDAO, parameters.getStart(), computeEnd(parameters.getEnd(), parameters.getTtl()));
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java
deleted file mode 100644
index 8ad576c4..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
-import static org.apache.ambari.infra.json.StringToFsPermissionConverter.toFsPermission;
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-import java.time.Duration;
-
-import org.apache.ambari.infra.job.JobProperties;
-import org.apache.ambari.infra.json.DurationToStringConverter;
-import org.apache.ambari.infra.json.FsPermissionToStringConverter;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.springframework.batch.core.JobParameters;
-
-public class DocumentArchivingProperties extends JobProperties<ArchivingParameters> {
-  private int readBlockSize;
-  private int writeBlockSize;
-  private ExportDestination destination;
-  private String localDestinationDirectory;
-  private String fileNameSuffixColumn;
-  private String fileNameSuffixDateFormat;
-  private Duration ttl;
-  private SolrProperties solr;
-
-  private String s3AccessFile;
-  private String s3KeyPrefix;
-  private String s3BucketName;
-  private String s3Endpoint;
-
-  private String hdfsEndpoint;
-  private String hdfsDestinationDirectory;
-  private FsPermission hdfsFilePermission;
-  private String hdfsKerberosPrincipal;
-  private String hdfsKerberosKeytabPath;
-
-  public int getReadBlockSize() {
-    return readBlockSize;
-  }
-
-  public void setReadBlockSize(int readBlockSize) {
-    this.readBlockSize = readBlockSize;
-  }
-
-  public int getWriteBlockSize() {
-    return writeBlockSize;
-  }
-
-  public void setWriteBlockSize(int writeBlockSize) {
-    this.writeBlockSize = writeBlockSize;
-  }
-
-  public ExportDestination getDestination() {
-    return destination;
-  }
-
-  public void setDestination(ExportDestination destination) {
-    this.destination = destination;
-  }
-
-  public String getLocalDestinationDirectory() {
-    return localDestinationDirectory;
-  }
-
-  public void setLocalDestinationDirectory(String localDestinationDirectory) {
-    this.localDestinationDirectory = localDestinationDirectory;
-  }
-
-  public String getFileNameSuffixColumn() {
-    return fileNameSuffixColumn;
-  }
-
-  public void setFileNameSuffixColumn(String fileNameSuffixColumn) {
-    this.fileNameSuffixColumn = fileNameSuffixColumn;
-  }
-
-  public String getFileNameSuffixDateFormat() {
-    return fileNameSuffixDateFormat;
-  }
-
-  public void setFileNameSuffixDateFormat(String fileNameSuffixDateFormat) {
-    this.fileNameSuffixDateFormat = fileNameSuffixDateFormat;
-  }
-
-  public Duration getTtl() {
-    return ttl;
-  }
-
-  public void setTtl(Duration ttl) {
-    this.ttl = ttl;
-  }
-
-  public SolrProperties getSolr() {
-    return solr;
-  }
-
-  public void setSolr(SolrProperties query) {
-    this.solr = query;
-  }
-
-  public String getS3AccessFile() {
-    return s3AccessFile;
-  }
-
-  public void setS3AccessFile(String s3AccessFile) {
-    this.s3AccessFile = s3AccessFile;
-  }
-
-  public String getS3KeyPrefix() {
-    return s3KeyPrefix;
-  }
-
-  public void setS3KeyPrefix(String s3KeyPrefix) {
-    this.s3KeyPrefix = s3KeyPrefix;
-  }
-
-  public String getS3BucketName() {
-    return s3BucketName;
-  }
-
-  public void setS3BucketName(String s3BucketName) {
-    this.s3BucketName = s3BucketName;
-  }
-
-  public String getS3Endpoint() {
-    return s3Endpoint;
-  }
-
-  public void setS3Endpoint(String s3Endpoint) {
-    this.s3Endpoint = s3Endpoint;
-  }
-
-  public String getHdfsEndpoint() {
-    return hdfsEndpoint;
-  }
-
-  public void setHdfsEndpoint(String hdfsEndpoint) {
-    this.hdfsEndpoint = hdfsEndpoint;
-  }
-
-  public String getHdfsDestinationDirectory() {
-    return hdfsDestinationDirectory;
-  }
-
-  public FsPermission getHdfsFilePermission() {
-    return hdfsFilePermission;
-  }
-
-  public void setHdfsFilePermission(FsPermission hdfsFilePermission) {
-    this.hdfsFilePermission = hdfsFilePermission;
-  }
-
-  public void setHdfsDestinationDirectory(String hdfsDestinationDirectory) {
-    this.hdfsDestinationDirectory = hdfsDestinationDirectory;
-  }
-
-  public String getHdfsKerberosPrincipal() {
-    return hdfsKerberosPrincipal;
-  }
-
-  public void setHdfsKerberosPrincipal(String hdfsKerberosPrincipal) {
-    this.hdfsKerberosPrincipal = hdfsKerberosPrincipal;
-  }
-
-  public String getHdfsKerberosKeytabPath() {
-    return hdfsKerberosKeytabPath;
-  }
-
-  public void setHdfsKerberosKeytabPath(String hdfsKerberosKeytabPath) {
-    this.hdfsKerberosKeytabPath = hdfsKerberosKeytabPath;
-  }
-
-  private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) {
-    String valueText = jobParameters.getString(parameterName);
-    if (isBlank(valueText))
-      return defaultValue;
-    return Integer.parseInt(valueText);
-  }
-
-  @Override
-  public ArchivingParameters merge(JobParameters jobParameters) {
-    ArchivingParameters archivingParameters = new ArchivingParameters();
-    archivingParameters.setReadBlockSize(getIntJobParameter(jobParameters, "readBlockSize", readBlockSize));
-    archivingParameters.setWriteBlockSize(getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize));
-    archivingParameters.setDestination(ExportDestination.valueOf(jobParameters.getString("destination", destination.name())));
-    archivingParameters.setLocalDestinationDirectory(jobParameters.getString("localDestinationDirectory", localDestinationDirectory));
-    archivingParameters.setFileNameSuffixColumn(jobParameters.getString("fileNameSuffixColumn", fileNameSuffixColumn));
-    archivingParameters.setFileNameSuffixDateFormat(jobParameters.getString("fileNameSuffixDateFormat", fileNameSuffixDateFormat));
-    archivingParameters.setS3AccessFile(jobParameters.getString("s3AccessFile", s3AccessFile));
-    archivingParameters.setS3BucketName(jobParameters.getString("s3BucketName", s3BucketName));
-    archivingParameters.setS3KeyPrefix(jobParameters.getString("s3KeyPrefix", s3KeyPrefix));
-    archivingParameters.setS3Endpoint(jobParameters.getString("s3Endpoint", s3Endpoint));
-    archivingParameters.setHdfsEndpoint(jobParameters.getString("hdfsEndpoint", hdfsEndpoint));
-    archivingParameters.setHdfsDestinationDirectory(jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory));
-    archivingParameters.setHdfsFilePermission(toFsPermission(jobParameters.getString("hdfsFilePermission", FsPermissionToStringConverter.toString(hdfsFilePermission))));
-    archivingParameters.setHdfsKerberosPrincipal(jobParameters.getString("hdfsKerberosPrincipal", hdfsKerberosPrincipal));
-    archivingParameters.setHdfsKerberosKeytabPath(jobParameters.getString("hdfsKerberosKeytabPath", hdfsKerberosKeytabPath));
-    archivingParameters.setSolr(solr.merge(jobParameters));
-    archivingParameters.setStart(jobParameters.getString("start"));
-    archivingParameters.setEnd(jobParameters.getString("end"));
-    archivingParameters.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
-    return archivingParameters;
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
index a009031e..253fc40d 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
@@ -18,21 +18,21 @@
  */
 package org.apache.ambari.infra.job.archive;
 
+import java.util.Map;
+
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.Map;
-
 @Configuration
 @ConfigurationProperties(prefix = "infra-manager.jobs")
 public class DocumentArchivingPropertyMap {
-  private Map<String, DocumentArchivingProperties> solrDataArchiving;
+  private Map<String, ArchivingProperties> solrDataArchiving;
 
-  public Map<String, DocumentArchivingProperties> getSolrDataArchiving() {
+  public Map<String, ArchivingProperties> getSolrDataArchiving() {
     return solrDataArchiving;
   }
 
-  public void setSolrDataArchiving(Map<String, DocumentArchivingProperties> solrDataArchiving) {
+  public void setSolrDataArchiving(Map<String, ArchivingProperties> solrDataArchiving) {
     this.solrDataArchiving = solrDataArchiving;
   }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
index d87fdea7..f61746d2 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
@@ -19,8 +19,8 @@
 package org.apache.ambari.infra.job.archive;
 
 import org.apache.ambari.infra.job.JobContextRepository;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.BatchStatus;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.StepContribution;
@@ -34,7 +34,7 @@
 
 public class DocumentExporter implements Tasklet, StepExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DocumentExporter.class);
+  private static final Logger logger = LogManager.getLogger(DocumentExporter.class);
 
   private boolean complete = false;
   private final ItemStreamReader<Document> documentReader;
@@ -78,7 +78,7 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext chunkCon
         if (writer != null && writtenCount >= writeBlockSize) {
           stepExecution = jobContextRepository.getStepExecution(stepExecution.getJobExecutionId(), stepExecution.getId());
           if (stepExecution.getJobExecution().getStatus() == BatchStatus.STOPPING) {
-            LOG.info("Received stop signal.");
+            logger.info("Received stop signal.");
             writer.revert();
             writer = null;
             return RepeatStatus.CONTINUABLE;
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
index b15d8b76..65b518ff 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
@@ -28,7 +28,7 @@
 public class FileNameSuffixFormatter {
   public static final DateTimeFormatter SOLR_DATETIME_FORMATTER = DateTimeFormatter.ofPattern(SOLR_DATE_FORMAT_TEXT);
 
-  public static FileNameSuffixFormatter from(ArchivingParameters properties) {
+  public static FileNameSuffixFormatter from(ArchivingProperties properties) {
     return new FileNameSuffixFormatter(properties.getFileNameSuffixColumn(), properties.getFileNameSuffixDateFormat());
   }
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
index 531d2d57..1cf5b652 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
@@ -18,15 +18,23 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.UncheckedIOException;
+import java.io.UnsupportedEncodingException;
+
 import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
-import java.io.*;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class LocalDocumentItemWriter implements DocumentItemWriter {
-  private static final Logger LOG = LoggerFactory.getLogger(LocalDocumentItemWriter.class);
+  private static final Logger logger = LogManager.getLogger(LocalDocumentItemWriter.class);
 
   private static final ObjectMapper json = new ObjectMapper();
   private static final String ENCODING = "UTF-8";
@@ -69,7 +77,7 @@ public void write(Document document) {
   public void revert() {
     IOUtils.closeQuietly(bufferedWriter);
     if (!outFile.delete())
-      LOG.warn("File {} was not deleted. Exists: {}", outFile.getAbsolutePath(), outFile.exists());
+      logger.warn("File {} was not deleted. Exists: {}", outFile.getAbsolutePath(), outFile.exists());
   }
 
   @Override
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
index 879b58bc..7c4de52a 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
@@ -18,11 +18,7 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVRecord;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static org.apache.commons.csv.CSVFormat.DEFAULT;
 
 import java.io.FileNotFoundException;
 import java.io.FileReader;
@@ -34,10 +30,14 @@
 import java.util.Map;
 import java.util.Optional;
 
-import static org.apache.commons.csv.CSVFormat.DEFAULT;
+import org.apache.ambari.infra.conf.security.PasswordStore;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVRecord;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class S3AccessCsv implements PasswordStore {
-  private static final Logger LOG = LoggerFactory.getLogger(S3AccessCsv.class);
+  private static final Logger logger = LogManager.getLogger(S3AccessCsv.class);
 
   public static S3AccessCsv file(String path) {
     try {
@@ -77,7 +77,7 @@ public S3AccessCsv(Reader reader) {
     } catch (IOException e) {
       throw new UncheckedIOException(e);
     } catch (S3AccessCsvFormatException e) {
-      LOG.warn("Unable to parse csv file: {}", e.getMessage());
+      logger.warn("Unable to parse csv file: {}", e.getMessage());
     }
   }
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
index 76aa734f..3e1310a3 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
@@ -11,8 +11,8 @@
 
 import org.apache.ambari.infra.conf.security.CompositePasswordStore;
 import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.xmlpull.v1.XmlPullParserException;
 
 import io.minio.MinioClient;
@@ -46,14 +46,14 @@
  */
 public class S3Uploader extends AbstractFileAction {
 
-  private static final Logger LOG = LoggerFactory.getLogger(S3Uploader.class);
+  private static final Logger logger = LogManager.getLogger(S3Uploader.class);
 
   private final MinioClient client;
   private final String keyPrefix;
   private final String bucketName;
 
   public S3Uploader(S3Properties s3Properties, PasswordStore passwordStore) {
-    LOG.info("Initializing S3 client with " + s3Properties);
+    logger.info("Initializing S3 client with " + s3Properties);
 
     this.keyPrefix = s3Properties.getS3KeyPrefix();
     this.bucketName = s3Properties.getS3BucketName();
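
One call site above still assembles its message by string concatenation. With parameterized logging the argument's toString() only runs when INFO is enabled, so the placeholder form below is the cheaper equivalent (shown as a suggestion, not part of the patch):

    // Equivalent call with a placeholder; s3Properties.toString() is deferred
    // until the INFO level is actually enabled.
    logger.info("Initializing S3 client with {}", s3Properties);
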
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
index 7f8fd07e..ed77a063 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
@@ -22,19 +22,19 @@
 import java.io.UncheckedIOException;
 
 import org.apache.ambari.infra.job.SolrDAOBase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class SolrDAO extends SolrDAOBase implements DocumentWiper {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrDAO.class);
+  private static final Logger logger = LogManager.getLogger(SolrDAO.class);
 
-  private final SolrParameters queryProperties;
+  private final SolrProperties queryProperties;
 
-  public SolrDAO(SolrParameters queryProperties) {
+  public SolrDAO(SolrProperties queryProperties) {
     super(queryProperties.getZooKeeperConnectionString(), queryProperties.getCollection());
     this.queryProperties = queryProperties;
   }
@@ -53,7 +53,7 @@ public SolrDocumentIterator query(String start, String end, Document subInterval
             .build();
     query.setRows(rows);
 
-    LOG.info("Executing solr query {}", query.toLocalParamsString());
+    logger.info("Executing solr query {}", query.toLocalParamsString());
 
     try {
       CloudSolrClient client = createClient();
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParameters.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParameters.java
deleted file mode 100644
index a793c9b0..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParameters.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class SolrParameters {
-  private String zooKeeperConnectionString;
-  private String collection;
-  private String queryText;
-  private String filterQueryText;
-  private String[] sortColumn;
-  private String deleteQueryText;
-
-  public String getZooKeeperConnectionString() {
-    return zooKeeperConnectionString;
-  }
-
-  public void setZooKeeperConnectionString(String zooKeeperConnectionString) {
-    this.zooKeeperConnectionString = zooKeeperConnectionString;
-  }
-
-  public String getCollection() {
-    return collection;
-  }
-
-  public void setCollection(String collection) {
-    this.collection = collection;
-  }
-
-  public String getQueryText() {
-    return queryText;
-  }
-
-  public void setQueryText(String queryText) {
-    this.queryText = queryText;
-  }
-
-  public String getFilterQueryText() {
-    return filterQueryText;
-  }
-
-  public void setFilterQueryText(String filterQueryText) {
-    this.filterQueryText = filterQueryText;
-  }
-
-  public String[] getSortColumn() {
-    return sortColumn;
-  }
-
-  public void setSortColumn(String[] sortColumn) {
-    this.sortColumn = sortColumn;
-  }
-
-  public String getDeleteQueryText() {
-    return deleteQueryText;
-  }
-
-  public void setDeleteQueryText(String deleteQueryText) {
-    this.deleteQueryText = deleteQueryText;
-  }
-
-  public SolrQueryBuilder toQueryBuilder() {
-    return new SolrQueryBuilder().
-            setQueryText(queryText)
-            .setFilterQueryText(filterQueryText)
-            .addSort(sortColumn);
-  }
-
-  public void validate() {
-    if (isBlank(zooKeeperConnectionString))
-      throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!");
-
-    if (isBlank(collection))
-      throw new IllegalArgumentException("The property collection can not be null or empty string!");
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
index 43e871f4..cf26cda1 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
@@ -18,6 +18,8 @@
  */
 package org.apache.ambari.infra.job.archive;
 
+import static org.apache.commons.lang.StringUtils.isBlank;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -79,13 +81,28 @@ public void setDeleteQueryText(String deleteQueryText) {
     this.deleteQueryText = deleteQueryText;
   }
 
-  public SolrParameters merge(JobParameters jobParameters) {
-    SolrParameters solrParameters = new SolrParameters();
-    solrParameters.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
-    solrParameters.setCollection(jobParameters.getString("collection", collection));
-    solrParameters.setQueryText(jobParameters.getString("queryText", queryText));
-    solrParameters.setFilterQueryText(jobParameters.getString("filterQueryText", filterQueryText));
-    solrParameters.setDeleteQueryText(jobParameters.getString("deleteQueryText", deleteQueryText));
+  public SolrQueryBuilder toQueryBuilder() {
+    return new SolrQueryBuilder()
+            .setQueryText(queryText)
+            .setFilterQueryText(filterQueryText)
+            .addSort(sortColumn);
+  }
+
+  public void validate() {
+    if (isBlank(zooKeeperConnectionString))
+      throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!");
+
+    if (isBlank(collection))
+      throw new IllegalArgumentException("The property collection can not be null or empty string!");
+  }
+
+  public SolrProperties merge(JobParameters jobParameters) {
+    SolrProperties solrProperties = new SolrProperties();
+    solrProperties.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
+    solrProperties.setCollection(jobParameters.getString("collection", collection));
+    solrProperties.setQueryText(jobParameters.getString("queryText", queryText));
+    solrProperties.setFilterQueryText(jobParameters.getString("filterQueryText", filterQueryText));
+    solrProperties.setDeleteQueryText(jobParameters.getString("deleteQueryText", deleteQueryText));
 
     String sortValue;
     List<String> sortColumns = new ArrayList<>();
@@ -95,12 +112,12 @@ public SolrParameters merge(JobParameters jobParameters) {
       ++i;
     }
     if (!sortColumns.isEmpty()) {
-      solrParameters.setSortColumn(sortColumns.toArray(new String[0]));
+      solrProperties.setSortColumn(sortColumns.toArray(new String[0]));
     }
     else {
-      solrParameters.setSortColumn(sortColumn);
+      solrProperties.setSortColumn(sortColumn);
     }
 
-    return solrParameters;
+    return solrProperties;
   }
 }
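
The merge method that moved here from the deleted SolrParameters keeps its precedence rule: each field is read from the launch-time JobParameters if present, otherwise the configured value is used. A sketch of a launch that overrides only the collection (the property values are illustrative):

    import org.springframework.batch.core.JobParameters;
    import org.springframework.batch.core.JobParametersBuilder;

    SolrProperties configured = new SolrProperties();
    configured.setZooKeeperConnectionString("zk1:2181"); // illustrative default
    configured.setCollection("hadoop_logs");             // illustrative default

    JobParameters launch = new JobParametersBuilder()
            .addString("collection", "audit_logs")
            .toJobParameters();

    SolrProperties effective = configured.merge(launch);
    // effective.getCollection()                -> "audit_logs" (launch override)
    // effective.getZooKeeperConnectionString() -> "zk1:2181"   (configured default)
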
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java
index 27f61fad..d6faa70a 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpConfiguration.java
@@ -73,7 +73,7 @@ protected Step cleanUpStep(TaskHistoryWiper taskHistoryWiper) {
   @StepScope
   protected TaskHistoryWiper taskHistoryWiper(
           InfraJobExecutionDao infraJobExecutionDao,
-          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") CleanUpParameters cleanUpParameters) {
-    return new TaskHistoryWiper(infraJobExecutionDao, cleanUpParameters.getTtl());
+          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") CleanUpProperties cleanUpProperties) {
+    return new TaskHistoryWiper(infraJobExecutionDao, cleanUpProperties.getTtl());
   }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpParameters.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpParameters.java
deleted file mode 100644
index a4f21417..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpParameters.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.cleanup;
-
-import java.time.Duration;
-
-import org.apache.ambari.infra.job.Validatable;
-import org.apache.ambari.infra.json.DurationToStringConverter;
-import org.apache.ambari.infra.json.StringToDurationConverter;
-
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.fasterxml.jackson.databind.annotation.JsonSerialize;
-
-public class CleanUpParameters implements Validatable {
-
-  @JsonSerialize(converter = DurationToStringConverter.class)
-  @JsonDeserialize(converter = StringToDurationConverter.class)
-  private Duration ttl;
-
-  public Duration getTtl() {
-    return ttl;
-  }
-
-  public void setTtl(Duration ttl) {
-    this.ttl = ttl;
-  }
-
-  @Override
-  public void validate() {
-
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java
index 7bf9808e..b29dcc15 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/CleanUpProperties.java
@@ -23,15 +23,22 @@
 import java.time.Duration;
 
 import org.apache.ambari.infra.job.JobProperties;
+import org.apache.ambari.infra.job.Validatable;
 import org.apache.ambari.infra.json.DurationToStringConverter;
+import org.apache.ambari.infra.json.StringToDurationConverter;
 import org.springframework.batch.core.JobParameters;
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+
 @Configuration
 @ConfigurationProperties(prefix = "infra-manager.jobs.clean-up")
-public class CleanUpProperties extends JobProperties<CleanUpParameters> {
+public class CleanUpProperties extends JobProperties<CleanUpProperties> implements Validatable {
 
+  @JsonSerialize(converter = DurationToStringConverter.class)
+  @JsonDeserialize(converter = StringToDurationConverter.class)
   private Duration ttl;
 
   protected CleanUpProperties() {
@@ -47,9 +54,14 @@ public void setTtl(Duration ttl) {
   }
 
   @Override
-  public CleanUpParameters merge(JobParameters jobParameters) {
-    CleanUpParameters cleanUpParameters = new CleanUpParameters();
-    cleanUpParameters.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
-    return cleanUpParameters;
+  public void validate() {
+
+  }
+
+  @Override
+  public CleanUpProperties merge(JobParameters jobParameters) {
+    CleanUpProperties cleanUpProperties = new CleanUpProperties();
+    cleanUpProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return cleanUpProperties;
   }
 }
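
CleanUpProperties now plays both roles: it is the @ConfigurationProperties bean and the value returned by merge, so JobProperties is parameterized with the class itself and Validatable moves onto the same type. The JobProperties source is not in this diff; an assumed sketch of the contract this implies:

    import org.springframework.batch.core.JobParameters;

    // Assumed shape of the base class: each properties bean merges launch
    // parameters into a fresh instance of its own concrete type.
    public abstract class JobProperties<T> {
      public abstract T merge(JobParameters jobParameters);
    }
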
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java
index 594515e5..5a296ded 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/cleanup/TaskHistoryWiper.java
@@ -22,8 +22,8 @@
 import java.time.OffsetDateTime;
 
 import org.apache.ambari.infra.job.InfraJobExecutionDao;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.StepContribution;
 import org.springframework.batch.core.scope.context.ChunkContext;
 import org.springframework.batch.core.step.tasklet.Tasklet;
@@ -31,7 +31,7 @@
 
 public class TaskHistoryWiper implements Tasklet {
 
-  private static final Logger logger = LoggerFactory.getLogger(TaskHistoryWiper.class);
+  private static final Logger logger = LogManager.getLogger(TaskHistoryWiper.class);
   public static final Duration DEFAULT_TTL = Duration.ofHours(1);
 
   private final InfraJobExecutionDao infraJobExecutionDao;
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingParameters.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
similarity index 73%
rename from ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingParameters.java
rename to ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
index 71d98e14..be4e6516 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingParameters.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DeletingProperties.java
@@ -18,18 +18,21 @@
  */
 package org.apache.ambari.infra.job.deleting;
 
+import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
 import static org.apache.commons.lang.StringUtils.isBlank;
 
 import java.time.Duration;
 
+import org.apache.ambari.infra.job.JobProperties;
 import org.apache.ambari.infra.job.Validatable;
 import org.apache.ambari.infra.json.DurationToStringConverter;
 import org.apache.ambari.infra.json.StringToDurationConverter;
+import org.springframework.batch.core.JobParameters;
 
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-public class DeletingParameters implements Validatable {
+public class DeletingProperties extends JobProperties<DeletingProperties> implements Validatable {
   private String zooKeeperConnectionString;
   private String collection;
   private String filterField;
@@ -98,4 +101,16 @@ public void validate() {
     if (isBlank(filterField))
       throw new IllegalArgumentException("The property filterField can not be null or empty string!");
   }
+
+  @Override
+  public DeletingProperties merge(JobParameters jobParameters) {
+    DeletingProperties deletingProperties = new DeletingProperties();
+    deletingProperties.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
+    deletingProperties.setCollection(jobParameters.getString("collection", collection));
+    deletingProperties.setFilterField(jobParameters.getString("filterField", filterField));
+    deletingProperties.setStart(jobParameters.getString("start", "*"));
+    deletingProperties.setEnd(jobParameters.getString("end", "*"));
+    deletingProperties.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
+    return deletingProperties;
+  }
 }
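
Note the asymmetry in merge: zooKeeperConnectionString, collection, filterField and ttl fall back to the configured bean, while start and end default to the Solr wildcard "*", so an unparameterized launch spans the whole range of the filter field. Assuming filterField is logtime, the range the wiper presumably deletes over looks like:

    // Hypothetical illustration of the default range; filterField = "logtime" is assumed.
    String filterField = "logtime";
    String start = "*", end = "*"; // the merge() defaults
    String range = String.format("%s:[%s TO %s]", filterField, start, end);
    // range -> "logtime:[* TO *]" (matches every document that has a logtime value)
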
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
index f9a782cb..1d6fb9a5 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
@@ -38,7 +38,7 @@
 import org.springframework.context.annotation.Configuration;
 
 @Configuration
-public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<DocumentDeletingProperties, DeletingParameters> {
+public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<DeletingProperties, DeletingProperties> {
 
   private final StepBuilderFactory steps;
   private final Step deleteStep;
@@ -72,7 +72,7 @@ public Step deleteStep(DocumentWiperTasklet tasklet) {
   @Bean
   @StepScope
   public DocumentWiperTasklet documentWiperTasklet(
-          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") DeletingParameters parameters) {
+          @Value("#{stepExecution.jobExecution.executionContext.get('" + PARAMETERS_CONTEXT_KEY + "')}") DeletingProperties parameters) {
     return new DocumentWiperTasklet(parameters);
   }
 }
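
The step-scoped tasklet above receives the already-merged DeletingProperties through SpEL, read back from the job's ExecutionContext under PARAMETERS_CONTEXT_KEY. The producer side is not in this diff; a hypothetical listener showing how the merged object could end up there (names and key value are assumptions):

    import org.springframework.batch.core.JobExecution;
    import org.springframework.batch.core.JobExecutionListener;

    // Hypothetical producer for the SpEL lookup: merge the configured bean with
    // the launch parameters and stash the result in the job's ExecutionContext.
    public class ParametersIntoContextListener implements JobExecutionListener {
      static final String PARAMETERS_CONTEXT_KEY = "parameters"; // assumed key

      private final DeletingProperties configured;

      ParametersIntoContextListener(DeletingProperties configured) {
        this.configured = configured;
      }

      @Override
      public void beforeJob(JobExecution jobExecution) {
        DeletingProperties merged = configured.merge(jobExecution.getJobParameters());
        merged.validate();
        jobExecution.getExecutionContext().put(PARAMETERS_CONTEXT_KEY, merged);
      }

      @Override
      public void afterJob(JobExecution jobExecution) {
      }
    }
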
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java
deleted file mode 100644
index e7ecc139..00000000
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.deleting;
-
-import static org.apache.ambari.infra.json.StringToDurationConverter.toDuration;
-
-import java.time.Duration;
-
-import org.apache.ambari.infra.job.JobProperties;
-import org.apache.ambari.infra.json.DurationToStringConverter;
-import org.springframework.batch.core.JobParameters;
-
-public class DocumentDeletingProperties extends JobProperties<DeletingParameters> {
-  private String zooKeeperConnectionString;
-  private String collection;
-  private String filterField;
-  private Duration ttl;
-
-  public String getZooKeeperConnectionString() {
-    return zooKeeperConnectionString;
-  }
-
-  public void setZooKeeperConnectionString(String zooKeeperConnectionString) {
-    this.zooKeeperConnectionString = zooKeeperConnectionString;
-  }
-
-  public String getCollection() {
-    return collection;
-  }
-
-  public void setCollection(String collection) {
-    this.collection = collection;
-  }
-
-  public String getFilterField() {
-    return filterField;
-  }
-
-  public void setFilterField(String filterField) {
-    this.filterField = filterField;
-  }
-
-  public Duration getTtl() {
-    return ttl;
-  }
-
-  public void setTtl(Duration ttl) {
-    this.ttl = ttl;
-  }
-
-  @Override
-  public DeletingParameters merge(JobParameters jobParameters) {
-    DeletingParameters deletingParameters = new DeletingParameters();
-    deletingParameters.setZooKeeperConnectionString(jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString));
-    deletingParameters.setCollection(jobParameters.getString("collection", collection));
-    deletingParameters.setFilterField(jobParameters.getString("filterField", filterField));
-    deletingParameters.setStart(jobParameters.getString("start", "*"));
-    deletingParameters.setEnd(jobParameters.getString("end", "*"));
-    deletingParameters.setTtl(toDuration(jobParameters.getString("ttl", DurationToStringConverter.toString(ttl))));
-    return deletingParameters;
-  }
-}
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
index 1dc0caf9..92d04281 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
@@ -18,21 +18,21 @@
  */
 package org.apache.ambari.infra.job.deleting;
 
+import java.util.Map;
+
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.Map;
-
 @Configuration
 @ConfigurationProperties(prefix = "infra-manager.jobs")
 public class DocumentDeletingPropertyMap {
-  private Map<String, DocumentDeletingProperties> solrDataDeleting;
+  private Map<String, DeletingProperties> solrDataDeleting;
 
-  public Map<String, DocumentDeletingProperties> getSolrDataDeleting() {
+  public Map<String, DeletingProperties> getSolrDataDeleting() {
     return solrDataDeleting;
   }
 
-  public void setSolrDataDeleting(Map<String, DocumentDeletingProperties> solrDataDeleting) {
+  public void setSolrDataDeleting(Map<String, DeletingProperties> solrDataDeleting) {
     this.solrDataDeleting = solrDataDeleting;
   }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
index 69d8c623..cae64f4c 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
@@ -28,11 +28,11 @@
 import org.springframework.batch.repeat.RepeatStatus;
 
 public class DocumentWiperTasklet extends SolrDAOBase implements Tasklet {
-  private final DeletingParameters parameters;
+  private final DeletingProperties parameters;
 
-  public DocumentWiperTasklet(DeletingParameters deletingParameters) {
-    super(deletingParameters.getZooKeeperConnectionString(), deletingParameters.getCollection());
-    parameters = deletingParameters;
+  public DocumentWiperTasklet(DeletingProperties deletingProperties) {
+    super(deletingProperties.getZooKeeperConnectionString(), deletingProperties.getCollection());
+    parameters = deletingProperties;
   }
 
   @Override
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
index a124e4d1..ff5ad6dc 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
@@ -18,17 +18,17 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.item.ItemProcessor;
 
 public class DummyItemProcessor implements ItemProcessor<DummyObject, String> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyItemProcessor.class);
+  private static final Logger logger = LogManager.getLogger(DummyItemProcessor.class);
 
   @Override
   public String process(DummyObject input) throws Exception {
-    LOG.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2());
+    logger.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2());
     Thread.sleep(10000);
     return String.format("%s, %s", input.getF1(), input.getF2());
   }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
index 89ad0130..3adb91a3 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
@@ -18,14 +18,6 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.apache.ambari.infra.conf.InfraManagerDataConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.annotation.BeforeStep;
-import org.springframework.batch.item.ItemWriter;
-
-import javax.inject.Inject;
 import java.io.File;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -33,9 +25,18 @@
 import java.util.Date;
 import java.util.List;
 
+import javax.inject.Inject;
+
+import org.apache.ambari.infra.conf.InfraManagerDataConfig;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.annotation.BeforeStep;
+import org.springframework.batch.item.ItemWriter;
+
 public class DummyItemWriter implements ItemWriter<String> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyItemWriter.class);
+  private static final Logger logger = LogManager.getLogger(DummyItemWriter.class);
 
   private StepExecution stepExecution;
 
@@ -44,17 +45,17 @@
 
   @Override
   public void write(List<? extends String> values) throws Exception {
-    LOG.info("DummyItem writer called (values: {})... wait 1 seconds", values.toString());
+    logger.info("DummyItem writer called (values: {})... wait 1 seconds", values.toString());
     Thread.sleep(1000);
     String outputDirectoryLocation = String.format("%s%s%s%s", infraManagerDataConfig.getDataFolder(), File.separator, "dummyOutput-", new Date().getTime());
     Path pathToDirectory = Paths.get(outputDirectoryLocation);
     Path pathToFile = Paths.get(String.format("%s%s%s", outputDirectoryLocation, File.separator, "dummyOutput.txt"));
     Files.createDirectories(pathToDirectory);
-    LOG.info("Write location to step execution context...");
+    logger.info("Write location to step execution context...");
     stepExecution.getExecutionContext().put("stepOutputLocation", pathToFile.toAbsolutePath().toString());
-    LOG.info("Write location to job execution context...");
+    logger.info("Write location to job execution context...");
     stepExecution.getJobExecution().getExecutionContext().put("jobOutputLocation", pathToFile.toAbsolutePath().toString());
-    LOG.info("Write to file: {}", pathToFile.toAbsolutePath());
+    logger.info("Write to file: {}", pathToFile.toAbsolutePath());
     Files.write(pathToFile, values.toString().getBytes());
   }
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
index 99c50e80..4602b9bd 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
@@ -18,28 +18,28 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobExecutionListener;
 
 public class DummyJobListener implements JobExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyJobListener.class);
+  private static final Logger logger = LogManager.getLogger(DummyJobListener.class);
 
   @Override
   public void beforeJob(JobExecution jobExecution) {
-    LOG.info("Dummy - before job execution");
+    logger.info("Dummy - before job execution");
   }
 
   @Override
   public void afterJob(JobExecution jobExecution) {
-    LOG.info("Dummy - after job execution");
+    logger.info("Dummy - after job execution");
     if (jobExecution.getExecutionContext().get("jobOutputLocation") != null) {
       String jobOutputLocation = (String) jobExecution.getExecutionContext().get("jobOutputLocation");
       String exitDescription = "file://" + jobOutputLocation;
-      LOG.info("Add exit description '{}'", exitDescription);
+      logger.info("Add exit description '{}'", exitDescription);
       jobExecution.setExitStatus(new ExitStatus(ExitStatus.COMPLETED.getExitCode(), exitDescription));
     }
   }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
index 548e6504..838d8461 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
@@ -18,24 +18,24 @@
  */
 package org.apache.ambari.infra.job.dummy;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.StepExecution;
 import org.springframework.batch.core.StepExecutionListener;
 
 public class DummyStepListener implements StepExecutionListener {
 
-  private static final Logger LOG = LoggerFactory.getLogger(DummyStepListener.class);
+  private static final Logger logger = LogManager.getLogger(DummyStepListener.class);
 
   @Override
   public void beforeStep(StepExecution stepExecution) {
-    LOG.info("Dummy step - before step execution");
+    logger.info("Dummy step - before step execution");
   }
 
   @Override
   public ExitStatus afterStep(StepExecution stepExecution) {
-    LOG.info("Dummy step - after step execution");
+    logger.info("Dummy step - after step execution");
     return stepExecution.getExitStatus();
   }
 }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
index 64ba21fd..86ffc1a5 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
@@ -40,8 +40,8 @@
 import org.apache.ambari.infra.model.StepExecutionContextResponse;
 import org.apache.ambari.infra.model.StepExecutionInfoResponse;
 import org.apache.ambari.infra.model.StepExecutionProgressResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.history.StepExecutionHistory;
 import org.springframework.batch.admin.service.JobService;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
@@ -67,7 +67,7 @@
 @Named
 public class JobManager implements Jobs {
 
-  private static final Logger LOG = LoggerFactory.getLogger(JobManager.class);
+  private static final Logger logger = LogManager.getLogger(JobManager.class);
 
   @Inject
   private JobService jobService;
@@ -117,7 +117,7 @@ public void stopAndAbandon(Long jobExecutionId) throws NoSuchJobExecutionExcepti
     try {
       jobService.stop(jobExecutionId);
     } catch (JobExecutionNotRunningException e) {
-      LOG.warn(String.format("Job is not running jobExecutionId=%d", jobExecutionId), e.getMessage());
+      logger.warn("Job is not running jobExecutionId={}. {}", jobExecutionId, e.getMessage());
     }
     jobService.abandon(jobExecutionId);
   }
@@ -162,7 +162,7 @@ public JobExecutionInfoResponse stopOrAbandonJobByExecutionId(Long jobExecutionI
     } else {
       throw new UnsupportedOperationException("Unsupported operaration");
     }
-    LOG.info("Job {} was marked {}", jobExecution.getJobInstance().getJobName(), operation.name());
+    logger.info("Job {} was marked {}", jobExecution.getJobInstance().getJobName(), operation.name());
     return new JobExecutionInfoResponse(jobExecution, timeZone);
   }
 
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
index 079cce3e..9f47bddf 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
@@ -19,9 +19,17 @@
 package org.apache.ambari.infra.rest;
 
 
-import com.google.common.collect.Maps;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.util.Map;
+
+import javax.batch.operations.JobExecutionAlreadyCompleteException;
+import javax.inject.Named;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
 import org.springframework.batch.core.JobParametersInvalidException;
 import org.springframework.batch.core.launch.JobExecutionNotFailedException;
@@ -38,19 +46,13 @@
 import org.springframework.batch.core.step.NoSuchStepException;
 import org.springframework.web.bind.MethodArgumentNotValidException;
 
-import javax.batch.operations.JobExecutionAlreadyCompleteException;
-import javax.inject.Named;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-import java.util.Map;
+import com.google.common.collect.Maps;
 
 @Named
 @Provider
 public class JobExceptionMapper implements ExceptionMapper<Throwable> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(JobExceptionMapper.class);
+  private static final Logger logger = LogManager.getLogger(JobExceptionMapper.class);
 
   private static final Map<Class, Response.Status> exceptionStatusCodeMap = Maps.newHashMap();
 
@@ -75,13 +77,13 @@
 
   @Override
   public Response toResponse(Throwable throwable) {
-    LOG.error("REST Exception occurred:", throwable);
+    logger.error("REST Exception occurred:", throwable);
     Response.Status status = Response.Status.INTERNAL_SERVER_ERROR;
 
     for (Map.Entry<Class, Response.Status> entry : exceptionStatusCodeMap.entrySet()) {
       if (throwable.getClass().isAssignableFrom(entry.getKey())) {
         status = entry.getValue();
-        LOG.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode());
+        logger.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode());
         break;
       }
     }
diff --git a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
index 7e353fbe..43f7c41f 100644
--- a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
+++ b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
@@ -48,8 +48,8 @@
 import org.apache.ambari.infra.model.StepExecutionInfoResponse;
 import org.apache.ambari.infra.model.StepExecutionProgressResponse;
 import org.apache.ambari.infra.model.StepExecutionRequest;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
 import org.springframework.batch.admin.web.JobInfo;
 import org.springframework.batch.core.JobParametersBuilder;
@@ -74,7 +74,7 @@
 @Named
 @Scope("request")
 public class JobResource {
-  private static final Logger LOG = LoggerFactory.getLogger(JobResource.class);
+  private static final Logger logger = LogManager.getLogger(JobResource.class);
 
   @Inject
   private JobManager jobManager;
@@ -98,7 +98,7 @@ public JobExecutionInfoResponse startJob(@BeanParam @Valid JobInstanceStartReque
     String params = request.getParams();
     JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
     if (params != null) {
-      LOG.info("Parsing parameters of job {} '{}'", jobName, params);
+      logger.info("Parsing parameters of job {} '{}'", jobName, params);
       Splitter.on(',')
               .trimResults()
               .withKeyValueSeparator(Splitter.on('=').limit(2).trimResults())
diff --git a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
index 71d25b63..d4b4313c 100644
--- a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
+++ b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
@@ -36,7 +36,7 @@ public void testMergeSortColumns() {
 
     SolrProperties solrProperties = new SolrProperties();
     solrProperties.setSortColumn(new String[] {"testColumn"});
-    SolrParameters solrParameters = solrProperties.merge(jobParameters);
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
     assertThat(solrParameters.getSortColumn().length, is(2));
     assertThat(solrParameters.getSortColumn()[0], is("logtime"));
     assertThat(solrParameters.getSortColumn()[1], is("id"));
@@ -48,7 +48,7 @@ public void testMergeWhenNoSortIsDefined() {
             .toJobParameters();
 
     SolrProperties solrProperties = new SolrProperties();
-    SolrParameters solrParameters = solrProperties.merge(jobParameters);
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
     assertThat(solrParameters.getSortColumn(), is(nullValue()));
   }
 
@@ -59,7 +59,7 @@ public void testMergeWhenPropertiesAreDefinedButJobParamsAreNot() {
 
     SolrProperties solrProperties = new SolrProperties();
     solrProperties.setSortColumn(new String[] {"testColumn"});
-    SolrParameters solrParameters = solrProperties.merge(jobParameters);
+    SolrProperties solrParameters = solrProperties.merge(jobParameters);
     assertThat(solrParameters.getSortColumn().length, is(1));
     assertThat(solrParameters.getSortColumn()[0], is("testColumn"));
   }
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java
index cc699854..e05586af 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java
@@ -35,7 +35,7 @@
 
 public class AmbariSolrCloudCLI {
 
-  private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudCLI.class);
+  private static final Logger logger = LoggerFactory.getLogger(AmbariSolrCloudCLI.class);
 
   private static final int ZK_CLIENT_TIMEOUT = 60000; // 1 minute
   private static final int ZK_CLIENT_CONNECT_TIMEOUT = 60000; // 1 minute
@@ -700,9 +700,9 @@ private static void validateRequiredOptions(CommandLine cli, String command, Opt
 
   private static void exit(int exitCode, String message) {
     if (message != null){
-      LOG.error(message);
+      logger.error(message);
     }
-    LOG.info("Return code: {}", exitCode);
+    logger.info("Return code: {}", exitCode);
     System.exit(exitCode);
   }
 }
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
index 4ef629d8..fe7e1140 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
@@ -53,7 +53,7 @@
  */
 public class AmbariSolrCloudClient {
 
-  private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudClient.class);
+  private static final Logger logger = LoggerFactory.getLogger(AmbariSolrCloudClient.class);
 
   private final String zkConnectString;
   private final String collection;
@@ -127,9 +127,9 @@ public String createCollection() throws Exception {
     List<String> collections = listCollections();
     if (!collections.contains(getCollection())) {
       String collection = new CreateCollectionCommand(getRetryTimes(), getInterval()).run(this);
-      LOG.info("Collection '{}' creation request sent.", collection);
+      logger.info("Collection '{}' creation request sent.", collection);
     } else {
-      LOG.info("Collection '{}' already exits.", getCollection());
+      logger.info("Collection '{}' already exits.", getCollection());
       if (this.isImplicitRouting()) {
         createShard(null);
       }
@@ -140,7 +140,7 @@ public String createCollection() throws Exception {
   public String outputCollectionData() throws Exception {
     List<String> collections = listCollections();
     String result = new DumpCollectionsCommand(getRetryTimes(), getInterval(), collections).run(this);
-    LOG.info("Dump collections response: {}", result);
+    logger.info("Dump collections response: {}", result);
     return result;
   }
 
@@ -148,9 +148,9 @@ public String outputCollectionData() throws Exception {
    * Set cluster property in clusterprops.json.
    */
   public void setClusterProp() throws Exception {
-    LOG.info("Set cluster prop: '{}'", this.getPropName());
+    logger.info("Set cluster prop: '{}'", this.getPropName());
     String newPropValue = new SetClusterPropertyZkCommand(getRetryTimes(), getInterval()).run(this);
-    LOG.info("Set cluster prop '{}' successfully to '{}'", this.getPropName(), newPropValue);
+    logger.info("Set cluster prop '{}' successfully to '{}'", this.getPropName(), newPropValue);
   }
 
   /**
@@ -159,11 +159,11 @@ public void setClusterProp() throws Exception {
   public void createZnode() throws Exception {
     boolean znodeExists = isZnodeExists(this.znode);
     if (znodeExists) {
-      LOG.info("Znode '{}' already exists.", this.znode);
+      logger.info("Znode '{}' already exists.", this.znode);
     } else {
-      LOG.info("Znode '{}' does not exist. Creating...", this.znode);
+      logger.info("Znode '{}' does not exist. Creating...", this.znode);
       String newZnode = new CreateSolrZnodeZkCommand(getRetryTimes(), getInterval()).run(this);
-      LOG.info("Znode '{}' is created successfully.", newZnode);
+      logger.info("Znode '{}' is created successfully.", newZnode);
     }
   }
 
@@ -172,20 +172,20 @@ public void createZnode() throws Exception {
    * E.g.: localhost:2181 and znode: /ambari-solr, checks existance of localhost:2181/ambari-solr
    */
   public boolean isZnodeExists(String znode) throws Exception {
-    LOG.info("Check '{}' znode exists or not", znode);
+    logger.info("Check '{}' znode exists or not", znode);
     boolean result = new CheckZnodeZkCommand(getRetryTimes(), getInterval(), znode).run(this);
     if (result) {
-      LOG.info("'{}' znode exists", znode);
+      logger.info("'{}' znode exists", znode);
     } else {
-      LOG.info("'{}' znode does not exist", znode);
+      logger.info("'{}' znode does not exist", znode);
     }
     return result;
   }
 
   public void setupKerberosPlugin() throws Exception {
-    LOG.info("Setup kerberos plugin in security.json");
+    logger.info("Setup kerberos plugin in security.json");
     new EnableKerberosPluginSolrZkCommand(getRetryTimes(), getInterval()).run(this);
-    LOG.info("KerberosPlugin is set in security.json");
+    logger.info("KerberosPlugin is set in security.json");
   }
 
   /**
@@ -206,7 +206,7 @@ public void secureZnode() throws Exception {
    * Unsecure znode
    */
   public void unsecureZnode() throws Exception {
-    LOG.info("Disable security for znode - ", this.getZnode());
+    logger.info("Disable security for znode - ", this.getZnode());
     new UnsecureZNodeZkCommand(getRetryTimes(), getInterval()).run(this);
   }
 
@@ -215,7 +215,7 @@ public void unsecureZnode() throws Exception {
    */
   public String uploadConfiguration() throws Exception {
     String configSet = new UploadConfigZkCommand(getRetryTimes(), getInterval()).run(this);
-    LOG.info("'{}' is uploaded to zookeeper.", configSet);
+    logger.info("'{}' is uploaded to zookeeper.", configSet);
     return configSet;
   }
 
@@ -224,7 +224,7 @@ public String uploadConfiguration() throws Exception {
    */
   public String downloadConfiguration() throws Exception {
     String configDir = new DownloadConfigZkCommand(getRetryTimes(), getInterval()).run(this);
-    LOG.info("Config set is download from zookeeper. ({})", configDir);
+    logger.info("Config set is download from zookeeper. ({})", configDir);
     return configDir;
   }
 
@@ -234,9 +234,9 @@ public String downloadConfiguration() throws Exception {
   public boolean configurationExists() throws Exception {
     boolean configExits = new CheckConfigZkCommand(getRetryTimes(), getInterval()).run(this);
     if (configExits) {
-      LOG.info("Config {} exits", configSet);
+      logger.info("Config {} exits", configSet);
     } else {
-      LOG.info("Configuration '{}' does not exist", configSet);
+      logger.info("Configuration '{}' does not exist", configSet);
     }
     return configExits;
   }
@@ -258,7 +258,7 @@ public boolean configurationExists() throws Exception {
       for (String shardName : shardList) {
         if (!existingShards.contains(shardName)) {
           new CreateShardCommand(shardName, getRetryTimes(), getInterval()).run(this);
-          LOG.info("Adding new shard to collection request sent ('{}': {})", getCollection(), shardName);
+          logger.info("Adding new shard to collection request sent ('{}': {})", getCollection(), shardName);
           existingShards.add(shardName);
         }
       }
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java
index 5e878595..74c04801 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java
@@ -24,7 +24,7 @@
 import org.slf4j.LoggerFactory;
 
 public abstract class AbstractRetryCommand<RESPONSE> {
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractRetryCommand.class);
+  private static final Logger logger = LoggerFactory.getLogger(AbstractRetryCommand.class);
 
   private final int interval;
   private final int maxRetries;
@@ -44,9 +44,9 @@ private RESPONSE retry(int tries, AmbariSolrCloudClient solrCloudClient) throws
     try {
       return createAndProcessRequest(solrCloudClient);
     } catch (Exception ex) {
-      LOG.error(ex.getMessage(), ex);
+      logger.error(ex.getMessage(), ex);
       tries++;
-      LOG.info("Command failed, tries again (tries: {})", tries);
+      logger.info("Command failed, tries again (tries: {})", tries);
       if (maxRetries == tries) {
         throw new AmbariSolrCloudClientException(String.format("Maximum retries exceeded: %d", tries), ex);
       } else {
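
The hunk above shows the failure path of the retry helper: count the attempt, log it, and give up with AmbariSolrCloudClientException once maxRetries is hit; the else branch, cut off by the hunk, presumably sleeps for the configured interval and retries. An iterative sketch of the same control flow (simplified, with a generic exception type):

    import java.util.concurrent.Callable;

    // Iterative equivalent of the retry flow above; the real command recurses.
    static <T> T retry(int maxRetries, int intervalSeconds, Callable<T> request) throws Exception {
      for (int tries = 1; ; tries++) {
        try {
          return request.call();
        } catch (Exception ex) {
          if (tries >= maxRetries) {
            throw new IllegalStateException(String.format("Maximum retries exceeded: %d", tries), ex);
          }
          Thread.sleep(intervalSeconds * 1000L); // back off before the next attempt
        }
      }
    }
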
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java
index 69586239..f114b548 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java
@@ -18,6 +18,10 @@
  */
 package org.apache.ambari.infra.solr.commands;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
 import org.apache.ambari.infra.solr.util.AclUtils;
 import org.apache.commons.lang.StringUtils;
@@ -31,13 +35,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
 public class SecureSolrZNodeZkCommand extends AbstractZookeeperRetryCommand<Boolean> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(SecureSolrZNodeZkCommand.class);
+  private static final Logger logger = LoggerFactory.getLogger(SecureSolrZNodeZkCommand.class);
 
   public SecureSolrZNodeZkCommand(int maxRetries, int interval) {
     super(maxRetries, interval);
@@ -46,9 +46,8 @@ public SecureSolrZNodeZkCommand(int maxRetries, int interval) {
   @Override
   protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
     String zNode = client.getZnode();
-    List<ACL> newAclList = new ArrayList<>();
     List<ACL> saslUserList = AclUtils.createAclListFromSaslUsers(client.getSaslUsers().split(","));
-    newAclList.addAll(saslUserList);
+    List<ACL> newAclList = new ArrayList<>(saslUserList);
     newAclList.add(new ACL(ZooDefs.Perms.READ, new Id("world", "anyone")));
 
     String configsPath = String.format("%s/%s", zNode, "configs");
@@ -61,16 +60,15 @@ protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zk
 
     AclUtils.setRecursivelyOn(client.getSolrZkClient().getSolrZooKeeper(), zNode, newAclList, excludePaths);
 
-    List<ACL> commonConfigAcls = new ArrayList<>();
-    commonConfigAcls.addAll(saslUserList);
+    List<ACL> commonConfigAcls = new ArrayList<>(saslUserList);
     commonConfigAcls.add(new ACL(ZooDefs.Perms.READ | ZooDefs.Perms.CREATE, new Id("world", "anyone")));
 
-    LOG.info("Set sasl users for znode '{}' : {}", client.getZnode(), StringUtils.join(saslUserList, ","));
-    LOG.info("Skip {}/configs and {}/collections", client.getZnode(), client.getZnode());
+    logger.info("Set sasl users for znode '{}' : {}", client.getZnode(), StringUtils.join(saslUserList, ","));
+    logger.info("Skip {}/configs and {}/collections", client.getZnode(), client.getZnode());
     solrZooKeeper.setACL(configsPath, AclUtils.mergeAcls(solrZooKeeper.getACL(configsPath, new Stat()), commonConfigAcls), -1);
     solrZooKeeper.setACL(collectionsPath, AclUtils.mergeAcls(solrZooKeeper.getACL(collectionsPath, new Stat()), commonConfigAcls), -1);
 
-    LOG.info("Set world:anyone to 'cr' on  {}/configs and {}/collections", client.getZnode(), client.getZnode());
+    logger.info("Set world:anyone to 'cr' on  {}/configs and {}/collections", client.getZnode(), client.getZnode());
     AclUtils.setRecursivelyOn(solrZooKeeper, configsPath, saslUserList);
     AclUtils.setRecursivelyOn(solrZooKeeper, collectionsPath, saslUserList);
 
@@ -79,7 +77,7 @@ protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zk
 
   private void createZnodeIfNeeded(String configsPath, SolrZkClient zkClient) throws KeeperException, InterruptedException {
     if (!zkClient.exists(configsPath, true)) {
-      LOG.info("'{}' does not exist. Creating it ...", configsPath);
+      logger.info("'{}' does not exist. Creating it ...", configsPath);
       zkClient.makePath(configsPath, true);
     }
   }
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java
index 2b360fbd..ec300b3a 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java
@@ -18,6 +18,10 @@
  */
 package org.apache.ambari.infra.solr.commands;
 
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
 import org.apache.ambari.infra.solr.domain.AmbariSolrState;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -27,13 +31,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.nio.charset.StandardCharsets;
-import java.util.HashMap;
-import java.util.Map;
-
 public class UpdateStateFileZkCommand extends AbstractStateFileZkCommand {
 
-  private static final Logger LOG = LoggerFactory.getLogger(UpdateStateFileZkCommand.class);
+  private static final Logger logger = LoggerFactory.getLogger(UpdateStateFileZkCommand.class);
 
   private String unsecureZnode;
 
@@ -46,13 +46,13 @@ public UpdateStateFileZkCommand(int maxRetries, int interval, String unsecureZno
   protected AmbariSolrState executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
     boolean secure = client.isSecure();
     String stateFile = String.format("%s/%s", unsecureZnode, AbstractStateFileZkCommand.STATE_FILE);
-    AmbariSolrState result = null;
+    AmbariSolrState result;
     if (secure) {
-      LOG.info("Update state file in secure mode.");
+      logger.info("Update state file in secure mode.");
       updateStateFile(client, zkClient, AmbariSolrState.SECURE, stateFile);
       result = AmbariSolrState.SECURE;
     } else {
-      LOG.info("Update state file in unsecure mode.");
+      logger.info("Update state file in unsecure mode.");
       updateStateFile(client, zkClient, AmbariSolrState.UNSECURE, stateFile);
       result = AmbariSolrState.UNSECURE;
     }
@@ -62,15 +62,15 @@ protected AmbariSolrState executeZkCommand(AmbariSolrCloudClient client, SolrZkC
   private void updateStateFile(AmbariSolrCloudClient client, SolrZkClient zkClient, AmbariSolrState stateToUpdate,
                                String stateFile) throws Exception {
     if (!zkClient.exists(stateFile, true)) {
-      LOG.info("State file does not exits. Initializing it as '{}'", stateToUpdate);
+      logger.info("State file does not exits. Initializing it as '{}'", stateToUpdate);
       zkClient.create(stateFile, createStateJson(stateToUpdate).getBytes(StandardCharsets.UTF_8),
         CreateMode.PERSISTENT, true);
     } else {
       AmbariSolrState stateOnSecure = getStateFromJson(client, stateFile);
       if (stateToUpdate.equals(stateOnSecure)) {
-        LOG.info("State file is in '{}' mode. No update.", stateOnSecure);
+        logger.info("State file is in '{}' mode. No update.", stateOnSecure);
       } else {
-        LOG.info("State file is in '{}' mode. Updating it to '{}'", stateOnSecure, stateToUpdate);
+        logger.info("State file is in '{}' mode. Updating it to '{}'", stateOnSecure, stateToUpdate);
         zkClient.setData(stateFile, createStateJson(stateToUpdate).getBytes(StandardCharsets.UTF_8), true);
       }
     }
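
The update logic above is idempotent: the state file is created when absent and rewritten only when the recorded mode actually differs. The same create-or-set shape as a standalone sketch (writeIfChanged is a hypothetical name, not part of the PR):

    import java.nio.charset.StandardCharsets;

    import org.apache.solr.common.cloud.SolrZkClient;
    import org.apache.zookeeper.CreateMode;

    public final class ZkStateWriter {
      // Hypothetical helper: initialize the znode on first run, overwrite on change.
      public static void writeIfChanged(SolrZkClient zkClient, String path,
                                        String currentJson, String desiredJson) throws Exception {
        byte[] data = desiredJson.getBytes(StandardCharsets.UTF_8);
        if (!zkClient.exists(path, true)) {
          zkClient.create(path, data, CreateMode.PERSISTENT, true);
        } else if (!desiredJson.equals(currentJson)) {
          zkClient.setData(path, data, true);
        }
      }
    }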
diff --git a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java
index f46565b4..d5559955 100644
--- a/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java
+++ b/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java
@@ -18,20 +18,19 @@
  */
 package org.apache.ambari.infra.solr.util;
 
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 public class ShardUtils {
 
-  private static final Logger LOG = LoggerFactory.getLogger(ShardUtils.class);
+  private static final Logger logger = LoggerFactory.getLogger(ShardUtils.class);
 
   public static String generateShardListStr(int maxShardsPerNode) {
     String shardsListStr = "";
@@ -54,15 +53,13 @@ public static String generateShardListStr(int maxShardsPerNode) {
   }
 
   public static Collection<String> getShardNamesFromSlices(Collection<Slice> slices, String collection) {
-    Collection<String> result = new HashSet<String>();
-    Iterator<Slice> iter = slices.iterator();
-    while (iter.hasNext()) {
-      Slice slice = iter.next();
+    Collection<String> result = new HashSet<>();
+    for (Slice slice : slices) {
       for (Replica replica : slice.getReplicas()) {
-        LOG.info("collectionName=" + collection + ", slice.name="
-          + slice.getName() + ", slice.state=" + slice.getState()
-          + ", replica.core=" + replica.getStr("core")
-          + ", replica.state=" + replica.getStr("state"));
+        logger.info("collectionName=" + collection + ", slice.name="
+                + slice.getName() + ", slice.state=" + slice.getState()
+                + ", replica.core=" + replica.getStr("core")
+                + ", replica.state=" + replica.getStr("state"));
         result.add(slice.getName());
       }
     }
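
The ShardUtils hunk above swaps an explicit Iterator loop for the enhanced for statement and the diamond operator; both forms iterate identically, the refactored one just says less. A self-contained before/after sketch (illustrative names only):

    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.List;

    public final class IterationStyles {
      // Old style: explicit Iterator and a repeated type argument.
      static Collection<String> copyOld(List<String> names) {
        Collection<String> result = new HashSet<String>();
        Iterator<String> iter = names.iterator();
        while (iter.hasNext()) {
          result.add(iter.next());
        }
        return result;
      }

      // Refactored style: for-each plus the diamond operator, same behavior.
      static Collection<String> copyNew(List<String> names) {
        Collection<String> result = new HashSet<>();
        for (String name : names) {
          result.add(name);
        }
        return result;
      }
    }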
diff --git a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java
index 525c4192..510dc098 100644
--- a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java
+++ b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java
@@ -27,7 +27,7 @@
 import org.slf4j.LoggerFactory;
 
 public class MetricsUtils {
-  private static final Logger LOG = LoggerFactory.getLogger(MetricsUtils.class);
+  private static final Logger logger = LoggerFactory.getLogger(MetricsUtils.class);
   private static final String APPID = "ambari-infra-solr";
   public static final String NAME_PREFIX = "infra.";
 
@@ -41,15 +41,15 @@ private static String initHostName() {
       String ipHostName = ip.getHostName();
       String canonicalHostName = ip.getCanonicalHostName();
       if (!canonicalHostName.equalsIgnoreCase(ipAddress)) {
-        LOG.info("Using InetAddress.getCanonicalHostName()={}", canonicalHostName);
+        logger.info("Using InetAddress.getCanonicalHostName()={}", canonicalHostName);
         hostName = canonicalHostName;
       } else {
-        LOG.info("Using InetAddress.getHostName()={}", ipHostName);
+        logger.info("Using InetAddress.getHostName()={}", ipHostName);
         hostName = ipHostName;
       }
-      LOG.info("ipAddress={}, ipHostName={}, canonicalHostName={}, hostName={}", ipAddress, ipHostName, canonicalHostName, hostName);
+      logger.info("ipAddress={}, ipHostName={}, canonicalHostName={}, hostName={}", ipAddress, ipHostName, canonicalHostName, hostName);
     } catch (UnknownHostException e) {
-      LOG.error("Error getting hostname.", e);
+      logger.error("Error getting hostname.", e);
     }
 
     return hostName;
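
The probe above prefers the canonical (reverse-DNS) name and falls back to InetAddress.getHostName() when reverse lookup only echoes the address. The same decision as a runnable sketch using only JDK APIs (resolveHostName and the "localhost" fallback are assumptions, not the PR's code):

    import java.net.InetAddress;
    import java.net.UnknownHostException;

    public final class HostNames {
      static String resolveHostName() {
        try {
          InetAddress ip = InetAddress.getLocalHost();
          String canonical = ip.getCanonicalHostName();
          // Reverse DNS succeeded if the canonical name differs from the bare address.
          if (!canonical.equalsIgnoreCase(ip.getHostAddress())) {
            return canonical;
          }
          return ip.getHostName();
        } catch (UnknownHostException e) {
          return "localhost"; // assumed fallback for the sketch
        }
      }

      public static void main(String[] args) {
        System.out.println(resolveHostName());
      }
    }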
diff --git a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java
index 9a837faa..9ebe77bb 100644
--- a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java
+++ b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java
@@ -46,7 +46,7 @@
 
 public class ScheduledAMSReporter<T> extends ScheduledReporter {
 
-  private static final Logger LOG = LoggerFactory.getLogger(ScheduledAMSReporter.class);
+  private static final Logger logger = LoggerFactory.getLogger(ScheduledAMSReporter.class);
 
   private final SolrMetricsSink amsClient;
   private final String namePrefix;
@@ -92,7 +92,7 @@ public void report(SortedMap<String, Gauge> gauges,
       amsClient.emitMetrics(timelineMetrics);
     }
     catch (Exception ex) {
-      LOG.error("Unable to collect and send metrics", ex);
+      logger.error("Unable to collect and send metrics", ex);
     }
   }
 
@@ -100,7 +100,7 @@ private void addTimelineMetrics(String metricName, Gauge<T> gauge, long currentM
     try {
       timelineMetricList.addAll(gaugeConverter.convert(metricName, gauge, currentMillis));
     } catch (Exception ex) {
-      LOG.error("Unable to get value of gauge metric " + metricName, ex);
+      logger.error("Unable to get value of gauge metric " + metricName, ex);
     }
   }
 
@@ -111,7 +111,7 @@ private void addTimelineMetrics(String metricName, Timer timer, long currentTime
       timelineMetricList.add(toTimelineMetric(metricName + ".avgTimePerRequest", snapshot.getMean(), currentTime));
       timelineMetricList.add(toTimelineMetric(metricName + ".medianRequestTime", snapshot.getMedian(), currentTime));
     } catch (Exception ex) {
-      LOG.error("Unable to get value of timer metric " + metricName, ex);
+      logger.error("Unable to get value of timer metric " + metricName, ex);
     }
   }
 }
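
Note how each gauge and timer above is converted inside its own try/catch, so a single metric that throws on read cannot abort the whole report cycle. The isolation pattern in miniature (Map and Supplier stand in for the real registry types):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.function.Supplier;

    public final class FaultIsolatedReport {
      static List<String> report(Map<String, Supplier<Double>> gauges) {
        List<String> out = new ArrayList<>();
        for (Map.Entry<String, Supplier<Double>> e : gauges.entrySet()) {
          try {
            out.add(e.getKey() + "=" + e.getValue().get());
          } catch (Exception ex) {
            // Log and continue: one failing gauge must not drop the whole batch.
            System.err.println("Unable to get value of gauge metric " + e.getKey() + ": " + ex);
          }
        }
        return out;
      }
    }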
diff --git a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java
index 201c7977..9236d5dc 100644
--- a/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java
+++ b/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java
@@ -30,14 +30,14 @@
 import org.slf4j.LoggerFactory;
 
 public class SolrMetricsSink extends AbstractTimelineMetricsSink {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrMetricsSink.class);
+  private static final Logger logger = LoggerFactory.getLogger(SolrMetricsSink.class);
 
   private final Collection<String> collectorHosts;
   private final int port;
   private final AMSProtocol protocol;
 
   public SolrMetricsSink(String[] collectorHosts, int port, AMSProtocol protocol, SolrMetricsSecurityConfig securityConfig) {
-    LOG.info("Setting up SolrMetricsSink protocol={} hosts={} port={}", protocol.name(), join(collectorHosts, ","), port);
+    logger.info("Setting up SolrMetricsSink protocol={} hosts={} port={}", protocol.name(), join(collectorHosts, ","), port);
     this.collectorHosts = asList(collectorHosts);
     this.port = port;
     this.protocol = protocol;
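
Most of the renamed call sites, including the one above, use SLF4J's parameterized form, which defers message formatting until the level is known to be enabled; a few sites above (e.g. in ShardUtils) still concatenate. The difference as a sketch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public final class LoggingStyles {
      private static final Logger logger = LoggerFactory.getLogger(LoggingStyles.class);

      public static void main(String[] args) {
        String host = "c6401.ambari.apache.org"; // placeholder value
        int port = 6188;                         // placeholder value
        // Concatenation builds the message string even when INFO is disabled.
        logger.info("Setting up sink host=" + host + " port=" + port);
        // Parameterized form formats lazily, only if the message is actually logged.
        logger.info("Setting up sink host={} port={}", host, port);
      }
    }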


 
