Posted to commits@systemds.apache.org by mb...@apache.org on 2020/07/25 20:30:56 UTC

[systemds] branch master updated: [SYSTEMDS-135] Simplified log4j.properties files

This is an automated email from the ASF dual-hosted git repository.

mboehm7 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/systemds.git


The following commit(s) were added to refs/heads/master by this push:
     new 1e626c0  [SYSTEMDS-135] Simplified log4j.properties files
1e626c0 is described below

commit 1e626c02c1d74ec3ee6080ec54971f87d66a5279
Author: baunsgaard <ba...@tugraz.at>
AuthorDate: Sat Jul 25 22:15:12 2020 +0200

    [SYSTEMDS-135] Simplified log4j.properties files
    
    - Testing Log4J file simplification
    - add default log4j file
    
    Closes #980.
---
 .gitignore                                         |   2 -
 conf/log4j-silent.properties                       | 282 ---------------------
 .../{systemds-env.sh.template => log4j.properties} |  14 +-
 conf/log4j.properties.template                     | 260 +------------------
 src/test/resources/log4j.properties                | 257 +------------------
 5 files changed, 17 insertions(+), 798 deletions(-)
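
Context: the commit collapses roughly 280 lines of Hadoop-era log4j
configuration per file into about a dozen lines routed through a single
console appender. As an aside not taken from the commit itself, a file
like the new conf/log4j.properties is typically selected through the
standard log4j 1.x system property when launching SystemDS directly;
the jar path and script name below are illustrative assumptions:

    # Point log4j 1.x at the simplified config (standard
    # -Dlog4j.configuration property; jar path and DML script
    # are placeholders, not taken from this commit).
    java -Dlog4j.configuration=file:conf/log4j.properties \
         -cp target/SystemDS.jar org.apache.sysds.api.DMLScript \
         -f myScript.dml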

diff --git a/.gitignore b/.gitignore
index 89a6a00..ba41a6b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -56,8 +56,6 @@ src/main/python/tests/onnx_systemds/test_models/*.onnx
 
 # User configuration files
 conf/SystemDS-config.xml
-conf/log4j.properties
-conf/systemds-env.sh
 
 # Documentation artifacts
 docs/_site
diff --git a/conf/log4j-silent.properties b/conf/log4j-silent.properties
deleted file mode 100644
index 356bda0..0000000
--- a/conf/log4j-silent.properties
+++ /dev/null
@@ -1,282 +0,0 @@
-#-------------------------------------------------------------
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#-------------------------------------------------------------
-
-# Define some default values that can be overridden by system properties
-hadoop.root.logger=INFO,console
-hadoop.log.dir=.
-hadoop.log.file=hadoop.log
-hadoop.security.logger=OFF
-
-# Security appender
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender 
-log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
-
-log4j.logger.org.apache.spark=ERROR
-
-#
-# Job Summary Appender 
-#
-# Use following logger to send summary to separate file defined by 
-# hadoop.mapreduce.jobsummary.log.file rolled daily:
-# hadoop.mapreduce.jobsummary.logger=INFO,JSA
-# 
-hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
-hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=ALL
-
-# 
-# Guardim Proxy setup - HDFS, MapReduce and Hadoop RPC
-# 
-log4j.appender.GuardiumProxyAppender=org.apache.log4j.net.SocketAppender
-log4j.appender.GuardiumProxyAppender.RemoteHost=
-log4j.appender.GuardiumProxyAppender.Port=
-log4j.appender.GuardiumProxyAppender.RecoveryFile=audit-${hadoop.log.file}
-log4j.appender.GuardiumProxyAppender.Threshold=INFO
-
-# Hdfs audit logs
-log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
-log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hadoop.security.logger}
-
-hdfs.audit.logger=INFO,NullAppender
-hdfs.audit.log.maxfilesize=256MB
-hdfs.audit.log.maxbackupindex=20
-log4j.appender.RFAAUDIT=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log
-log4j.appender.RFAAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.RFAAUDIT.MaxFileSize=${hdfs.audit.log.maxfilesize}
-log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
-
-# MapReduce audit logs
-log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
-log4j.logger.org.apache.hadoop.mapred.AuditLogger=${hadoop.security.logger}
-
-mapred.audit.logger=INFO,NullAppender
-mapred.audit.log.maxfilesize=256MB
-mapred.audit.log.maxbackupindex=20
-log4j.appender.MRAUDIT=org.apache.log4j.RollingFileAppender
-log4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log
-log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.MRAUDIT.MaxFileSize=${mapred.audit.log.maxfilesize}
-log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
-
-# Hadoop RPC audit logs
-log4j.additivity.SecurityLogger=false
-log4j.logger.SecurityLogger=${hadoop.security.logger}
-
-log4j.appender.hadoopaudit=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.hadoopaudit.DatePattern='.'yyyy-MM-dd
-log4j.appender.hadoopaudit.File=${hadoop.log.dir}/audit-${hadoop.log.file}
-log4j.appender.hadoopaudit.Append=true
-log4j.appender.hadoopaudit.layout=org.apache.log4j.PatternLayout
-log4j.appender.hadoopaudit.layout.ConversionPattern=%d{ISO8601} %5p %c - %m%n
-
-#
-# Daily Rolling File Appender
-#
-
-#log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Rollver at midnight
-#log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-#log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this 
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#
-# TaskLog Appender
-#
-
-#Default values
-hadoop.tasklog.taskid=null
-hadoop.tasklog.iscleanup=false
-hadoop.tasklog.noKeepSplits=4
-hadoop.tasklog.totalLogFileSize=100
-hadoop.tasklog.purgeLogSplits=true
-hadoop.tasklog.logsRetainHours=12
-
-log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogSocketAppender
-log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
-log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
-log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
-
-log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
-log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-
-#
-#Security audit appender
-#
-
-hadoop.security.log.file=SecurityAuth.audit
-log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
-log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
-
-#
-# Rolling File Appender
-#
-
-#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-# Logfile size and and 30-day backups
-#log4j.appender.RFA.MaxFileSize=1MB
-#log4j.appender.RFA.MaxBackupIndex=30
-#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# Rolling File Appender
-#
-
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-# Logfile size and and 30-day backups
-log4j.appender.RFA.MaxFileSize=10MB
-log4j.appender.RFA.MaxBackupIndex=3
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-#
-#Logger for streaming Job Configuration
-#
-log4j.logger.org.apache.hadoop.mapred.JobTrackerConfLogStreaming=INFO,${SAJC}
-log4j.additivity.org.apache.hadoop.mapred.JobTrackerConfLogStreaming=false
-
-#
-#Socket Appender for streaming Job Configuration
-#
-log4j.appender.job.conf=org.apache.log4j.net.SocketAppender
-log4j.appender.job.conf.RemoteHost=localhost
-log4j.appender.job.conf.Port=${JOBCONF_LOGGING_PORT}
-log4j.appender.job.conf.layout=org.apache.log4j.PatternLayout
-log4j.appender.job.conf.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.job.conf.appender.ReconnectionDelay=120000
-
-
-#
-#Logger for streaming task attempt logs
-#
-log4j.logger.org.apache.hadoop.mapred.TaskLogSocketAppender=INFO,${SATA}
-log4j.additivity.org.apache.hadoop.mapred.TaskLogSocketAppender=false
-
-#
-#Socket appender for streaming task attempt logs
-#
-log4j.appender.task.attempt.log=org.apache.log4j.net.SocketAppender
-log4j.appender.task.attempt.log.RemoteHost=localhost
-log4j.appender.task.attempt.log.Port=${TASKATTEMPT_LOGGING_PORT}
-log4j.appender.task.attempt.log.layout=org.apache.log4j.PatternLayout
-log4j.appender.task.attempt.log.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.task.attempt.log.appender.ReconnectionDelay=120000
-
-#
-#Socket Appender for Streaming NameNode,SecondaryNameNode and JobTracker Logs 
-#
-log4j.appender.socket.appender=org.apache.log4j.net.SocketAppender
-log4j.appender.socket.appender.RemoteHost=localhost
-log4j.appender.socket.appender.Port=${HADOOP_LOGGING_PORT}
-log4j.appender.socket.appender.layout=org.apache.log4j.PatternLayout
-log4j.appender.socket.appender.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.socket.appender.ReconnectionDelay=120000
-
-#
-#Logger for streaming Job History Logs
-#
-log4j.logger.JobHistoryLogs=INFO,${SAJH}
-log4j.additivity.JobHistoryLogs=false
-
-#
-#Socket Appender for Job History Logs
-#
-log4j.appender.job.history.log=org.apache.log4j.net.SocketAppender
-log4j.appender.job.history.log.RemoteHost=localhost
-log4j.appender.job.history.log.Port=${JOBHISTORY_LOGGING_PORT}
-log4j.appender.job.history.log.layout=org.apache.log4j.PatternLayout
-log4j.appender.job.history.log.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.job.history.appender.ReconnectionDelay=120000
-
-
-
-# Custom Logging levels
-
-hadoop.metrics.log.level=INFO
-#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
-#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
-
-# Jets3t library
-log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
-
-#
-# Null Appender
-# Trap security logger on the hadoop client side
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-#
-# Job Summary Appender
-#
-log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
-log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
-log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-log4j.appender.JSA.DatePattern=.yyyy-MM-dd
-log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
-log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
diff --git a/conf/systemds-env.sh.template b/conf/log4j.properties
similarity index 70%
rename from conf/systemds-env.sh.template
rename to conf/log4j.properties
index 6036aa1..607c7cf 100644
--- a/conf/systemds-env.sh.template
+++ b/conf/log4j.properties
@@ -1,4 +1,3 @@
-#!/usr/bin/env bash
 #-------------------------------------------------------------
 #
 # Licensed to the Apache Software Foundation (ASF) under one
@@ -20,10 +19,13 @@
 #
 #-------------------------------------------------------------
 
-# This file is sourced when running the bin/systemds-standalone script.
-# Copy it as systemds-env.sh and edit it to configure SystemDS.
+log4j.rootLogger=ERROR,console
 
-# Example of adding additional Java execution options, which will
-# override defaults as necessary.
-#SYSTEMDS_JAVA_OPTS="-Xmx12g -Xms8g"
+log4j.logger.org.apache.sysds=ERROR
+log4j.logger.org.apache.spark=OFF
+log4j.logger.org.apache.hadoop=OFF
 
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
\ No newline at end of file
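
The new conf/log4j.properties above silences Spark and Hadoop entirely
and keeps SystemDS at ERROR. A minimal sketch of the kind of local
override a developer might apply for debugging (values are
illustrative, not part of the commit):

    # Hypothetical local tweaks: more SystemDS detail,
    # Spark error reporting back on.
    log4j.rootLogger=INFO,console
    log4j.logger.org.apache.sysds=DEBUG
    log4j.logger.org.apache.spark=ERROR
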
diff --git a/conf/log4j.properties.template b/conf/log4j.properties.template
index a474665..607c7cf 100644
--- a/conf/log4j.properties.template
+++ b/conf/log4j.properties.template
@@ -19,263 +19,13 @@
 #
 #-------------------------------------------------------------
 
+log4j.rootLogger=ERROR,console
 
-# Define some default values that can be overridden by system properties
-hadoop.root.logger=INFO,console
-hadoop.log.dir=.
-hadoop.log.file=hadoop.log
-hadoop.security.logger=OFF
-
-# Security appender
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender 
-log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
-
-#
-# Job Summary Appender 
-#
-# Use following logger to send summary to separate file defined by 
-# hadoop.mapreduce.jobsummary.log.file rolled daily:
-# hadoop.mapreduce.jobsummary.logger=INFO,JSA
-# 
-hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
-hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=ALL
-
-# 
-# Guardim Proxy setup - HDFS, MapReduce and Hadoop RPC
-# 
-log4j.appender.GuardiumProxyAppender=org.apache.log4j.net.SocketAppender
-log4j.appender.GuardiumProxyAppender.RemoteHost=
-log4j.appender.GuardiumProxyAppender.Port=
-log4j.appender.GuardiumProxyAppender.RecoveryFile=audit-${hadoop.log.file}
-log4j.appender.GuardiumProxyAppender.Threshold=INFO
-
-# Hdfs audit logs
-log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
-log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hadoop.security.logger}
-
-hdfs.audit.logger=INFO,NullAppender
-hdfs.audit.log.maxfilesize=256MB
-hdfs.audit.log.maxbackupindex=20
-log4j.appender.RFAAUDIT=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log
-log4j.appender.RFAAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.RFAAUDIT.MaxFileSize=${hdfs.audit.log.maxfilesize}
-log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
-
-# MapReduce audit logs
-log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
-log4j.logger.org.apache.hadoop.mapred.AuditLogger=${hadoop.security.logger}
-
-mapred.audit.logger=INFO,NullAppender
-mapred.audit.log.maxfilesize=256MB
-mapred.audit.log.maxbackupindex=20
-log4j.appender.MRAUDIT=org.apache.log4j.RollingFileAppender
-log4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log
-log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.MRAUDIT.MaxFileSize=${mapred.audit.log.maxfilesize}
-log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
-
-# Hadoop RPC audit logs
-log4j.additivity.SecurityLogger=false
-log4j.logger.SecurityLogger=${hadoop.security.logger}
-
-log4j.appender.hadoopaudit=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.hadoopaudit.DatePattern='.'yyyy-MM-dd
-log4j.appender.hadoopaudit.File=${hadoop.log.dir}/audit-${hadoop.log.file}
-log4j.appender.hadoopaudit.Append=true
-log4j.appender.hadoopaudit.layout=org.apache.log4j.PatternLayout
-log4j.appender.hadoopaudit.layout.ConversionPattern=%d{ISO8601} %5p %c - %m%n
-
-#
-# Daily Rolling File Appender
-#
-
-#log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Rollver at midnight
-#log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-#log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this 
-#
+log4j.logger.org.apache.sysds=ERROR
+log4j.logger.org.apache.spark=OFF
+log4j.logger.org.apache.hadoop=OFF
 
 log4j.appender.console=org.apache.log4j.ConsoleAppender
 log4j.appender.console.target=System.err
 log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#
-# TaskLog Appender
-#
-
-#Default values
-hadoop.tasklog.taskid=null
-hadoop.tasklog.iscleanup=false
-hadoop.tasklog.noKeepSplits=4
-hadoop.tasklog.totalLogFileSize=100
-hadoop.tasklog.purgeLogSplits=true
-hadoop.tasklog.logsRetainHours=12
-
-log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogSocketAppender
-log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
-log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
-log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
-
-log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
-log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-
-#
-#Security audit appender
-#
-
-hadoop.security.log.file=SecurityAuth.audit
-log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
-log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
-
-#
-# Rolling File Appender
-#
-
-#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-# Logfile size and and 30-day backups
-#log4j.appender.RFA.MaxFileSize=1MB
-#log4j.appender.RFA.MaxBackupIndex=30
-#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# Rolling File Appender
-#
-
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-# Logfile size and and 30-day backups
-log4j.appender.RFA.MaxFileSize=10MB
-log4j.appender.RFA.MaxBackupIndex=3
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-#
-#Logger for streaming Job Configuration
-#
-log4j.logger.org.apache.hadoop.mapred.JobTrackerConfLogStreaming=INFO,${SAJC}
-log4j.additivity.org.apache.hadoop.mapred.JobTrackerConfLogStreaming=false
-
-#
-#Socket Appender for streaming Job Configuration
-#
-log4j.appender.job.conf=org.apache.log4j.net.SocketAppender
-log4j.appender.job.conf.RemoteHost=localhost
-log4j.appender.job.conf.Port=${JOBCONF_LOGGING_PORT}
-log4j.appender.job.conf.layout=org.apache.log4j.PatternLayout
-log4j.appender.job.conf.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.job.conf.appender.ReconnectionDelay=120000
-
-
-#
-#Logger for streaming task attempt logs
-#
-log4j.logger.org.apache.hadoop.mapred.TaskLogSocketAppender=INFO,${SATA}
-log4j.additivity.org.apache.hadoop.mapred.TaskLogSocketAppender=false
-
-#
-#Socket appender for streaming task attempt logs
-#
-log4j.appender.task.attempt.log=org.apache.log4j.net.SocketAppender
-log4j.appender.task.attempt.log.RemoteHost=localhost
-log4j.appender.task.attempt.log.Port=${TASKATTEMPT_LOGGING_PORT}
-log4j.appender.task.attempt.log.layout=org.apache.log4j.PatternLayout
-log4j.appender.task.attempt.log.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.task.attempt.log.appender.ReconnectionDelay=120000
-
-#
-#Socket Appender for Streaming NameNode,SecondaryNameNode and JobTracker Logs 
-#
-log4j.appender.socket.appender=org.apache.log4j.net.SocketAppender
-log4j.appender.socket.appender.RemoteHost=localhost
-log4j.appender.socket.appender.Port=${HADOOP_LOGGING_PORT}
-log4j.appender.socket.appender.layout=org.apache.log4j.PatternLayout
-log4j.appender.socket.appender.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.socket.appender.ReconnectionDelay=120000
-
-#
-#Logger for streaming Job History Logs
-#
-log4j.logger.JobHistoryLogs=INFO,${SAJH}
-log4j.additivity.JobHistoryLogs=false
-
-#
-#Socket Appender for Job History Logs
-#
-log4j.appender.job.history.log=org.apache.log4j.net.SocketAppender
-log4j.appender.job.history.log.RemoteHost=localhost
-log4j.appender.job.history.log.Port=${JOBHISTORY_LOGGING_PORT}
-log4j.appender.job.history.log.layout=org.apache.log4j.PatternLayout
-log4j.appender.job.history.log.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.job.history.appender.ReconnectionDelay=120000
-
-
-
-# Custom Logging levels
-
-hadoop.metrics.log.level=INFO
-#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
-#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
-
-# Jets3t library
-log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
-
-#
-# Null Appender
-# Trap security logger on the hadoop client side
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-#
-# Job Summary Appender
-#
-log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
-log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
-log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-log4j.appender.JSA.DatePattern=.yyyy-MM-dd
-log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
-log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
\ No newline at end of file
diff --git a/src/test/resources/log4j.properties b/src/test/resources/log4j.properties
index 81ce25e..eb44347 100644
--- a/src/test/resources/log4j.properties
+++ b/src/test/resources/log4j.properties
@@ -19,262 +19,13 @@
 #
 #-------------------------------------------------------------
 
-# Define some default values that can be overridden by system properties
-hadoop.root.logger=ERROR,console
-hadoop.log.dir=.
-hadoop.log.file=hadoop.log
-hadoop.security.logger=OFF
+log4j.rootLogger=ERROR,console
 
-# Security appender
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender 
-log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
-
-#
-# Job Summary Appender 
-#
-# Use following logger to send summary to separate file defined by 
-# hadoop.mapreduce.jobsummary.log.file rolled daily:
-# hadoop.mapreduce.jobsummary.logger=INFO,JSA
-# 
-hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
-hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=ALL
-
-# 
-# Guardim Proxy setup - HDFS, MapReduce and Hadoop RPC
-# 
-log4j.appender.GuardiumProxyAppender=org.apache.log4j.net.SocketAppender
-log4j.appender.GuardiumProxyAppender.RemoteHost=
-log4j.appender.GuardiumProxyAppender.Port=
-log4j.appender.GuardiumProxyAppender.RecoveryFile=audit-${hadoop.log.file}
-log4j.appender.GuardiumProxyAppender.Threshold=INFO
-
-# Hdfs audit logs
-log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
-log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hadoop.security.logger}
-
-hdfs.audit.logger=INFO,NullAppender
-hdfs.audit.log.maxfilesize=256MB
-hdfs.audit.log.maxbackupindex=20
-log4j.appender.RFAAUDIT=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log
-log4j.appender.RFAAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.RFAAUDIT.MaxFileSize=${hdfs.audit.log.maxfilesize}
-log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
-
-# MapReduce audit logs
-log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
-log4j.logger.org.apache.hadoop.mapred.AuditLogger=${hadoop.security.logger}
-
-mapred.audit.logger=INFO,NullAppender
-mapred.audit.log.maxfilesize=256MB
-mapred.audit.log.maxbackupindex=20
-log4j.appender.MRAUDIT=org.apache.log4j.RollingFileAppender
-log4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log
-log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.MRAUDIT.MaxFileSize=${mapred.audit.log.maxfilesize}
-log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
-
-# Hadoop RPC audit logs
-log4j.additivity.SecurityLogger=false
-log4j.logger.SecurityLogger=${hadoop.security.logger}
-
-log4j.appender.hadoopaudit=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.hadoopaudit.DatePattern='.'yyyy-MM-dd
-log4j.appender.hadoopaudit.File=${hadoop.log.dir}/audit-${hadoop.log.file}
-log4j.appender.hadoopaudit.Append=true
-log4j.appender.hadoopaudit.layout=org.apache.log4j.PatternLayout
-log4j.appender.hadoopaudit.layout.ConversionPattern=%d{ISO8601} %5p %c - %m%n
-
-#
-# Daily Rolling File Appender
-#
-
-#log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Rollver at midnight
-#log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-#log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this 
-#
+log4j.logger.org.apache.sysds=ERROR
+log4j.logger.org.apache.spark=OFF
+log4j.logger.org.apache.hadoop=OFF
 
 log4j.appender.console=org.apache.log4j.ConsoleAppender
 log4j.appender.console.target=System.err
 log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#
-# TaskLog Appender
-#
-
-#Default values
-hadoop.tasklog.taskid=null
-hadoop.tasklog.iscleanup=false
-hadoop.tasklog.noKeepSplits=4
-hadoop.tasklog.totalLogFileSize=100
-hadoop.tasklog.purgeLogSplits=true
-hadoop.tasklog.logsRetainHours=12
-
-log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogSocketAppender
-log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
-log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
-log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
-
-log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
-log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-
-#
-#Security audit appender
-#
-
-hadoop.security.log.file=SecurityAuth.audit
-log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
-log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
-
-#
-# Rolling File Appender
-#
-
-#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-# Logfile size and and 30-day backups
-#log4j.appender.RFA.MaxFileSize=1MB
-#log4j.appender.RFA.MaxBackupIndex=30
-#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# Rolling File Appender
-#
-
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-# Logfile size and and 30-day backups
-log4j.appender.RFA.MaxFileSize=10MB
-log4j.appender.RFA.MaxBackupIndex=3
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-#
-#Logger for streaming Job Configuration
-#
-log4j.logger.org.apache.hadoop.mapred.JobTrackerConfLogStreaming=INFO,${SAJC}
-log4j.additivity.org.apache.hadoop.mapred.JobTrackerConfLogStreaming=false
-
-#
-#Socket Appender for streaming Job Configuration
-#
-log4j.appender.job.conf=org.apache.log4j.net.SocketAppender
-log4j.appender.job.conf.RemoteHost=localhost
-log4j.appender.job.conf.Port=${JOBCONF_LOGGING_PORT}
-log4j.appender.job.conf.layout=org.apache.log4j.PatternLayout
-log4j.appender.job.conf.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.job.conf.appender.ReconnectionDelay=120000
-
-
-#
-#Logger for streaming task attempt logs
-#
-log4j.logger.org.apache.hadoop.mapred.TaskLogSocketAppender=INFO,${SATA}
-log4j.additivity.org.apache.hadoop.mapred.TaskLogSocketAppender=false
-
-#
-#Socket appender for streaming task attempt logs
-#
-log4j.appender.task.attempt.log=org.apache.log4j.net.SocketAppender
-log4j.appender.task.attempt.log.RemoteHost=localhost
-log4j.appender.task.attempt.log.Port=${TASKATTEMPT_LOGGING_PORT}
-log4j.appender.task.attempt.log.layout=org.apache.log4j.PatternLayout
-log4j.appender.task.attempt.log.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.task.attempt.log.appender.ReconnectionDelay=120000
-
-#
-#Socket Appender for Streaming NameNode,SecondaryNameNode and JobTracker Logs 
-#
-log4j.appender.socket.appender=org.apache.log4j.net.SocketAppender
-log4j.appender.socket.appender.RemoteHost=localhost
-log4j.appender.socket.appender.Port=${HADOOP_LOGGING_PORT}
-log4j.appender.socket.appender.layout=org.apache.log4j.PatternLayout
-log4j.appender.socket.appender.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.socket.appender.ReconnectionDelay=120000
-
-#
-#Logger for streaming Job History Logs
-#
-log4j.logger.JobHistoryLogs=INFO,${SAJH}
-log4j.additivity.JobHistoryLogs=false
-
-#
-#Socket Appender for Job History Logs
-#
-log4j.appender.job.history.log=org.apache.log4j.net.SocketAppender
-log4j.appender.job.history.log.RemoteHost=localhost
-log4j.appender.job.history.log.Port=${JOBHISTORY_LOGGING_PORT}
-log4j.appender.job.history.log.layout=org.apache.log4j.PatternLayout
-log4j.appender.job.history.log.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.job.history.appender.ReconnectionDelay=120000
-
-
-
-# Custom Logging levels
-
-hadoop.metrics.log.level=INFO
-#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
-#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
-
-# Jets3t library
-log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
-
-#
-# Null Appender
-# Trap security logger on the hadoop client side
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-#
-# Job Summary Appender
-#
-log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
-log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
-log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-log4j.appender.JSA.DatePattern=.yyyy-MM-dd
-log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
-log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
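
The test resource file above applies the same ERROR default to the
whole test suite. When an individual test needs more detail, log4j 1.x
also allows adjusting a logger programmatically; a minimal Java sketch,
where only the org.apache.sysds logger name comes from this commit and
the class itself is illustrative:

    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;

    public class VerboseTestLogging {
        public static void main(String[] args) {
            // Raise SystemDS logging above the ERROR default from
            // src/test/resources/log4j.properties for this JVM only.
            Logger.getLogger("org.apache.sysds").setLevel(Level.DEBUG);
        }
    }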