You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@griffin.apache.org by gu...@apache.org on 2019/07/10 09:30:30 UTC
[griffin] branch master updated: [GRIFFIN-263] Use independent
logger for griffin
This is an automated email from the ASF dual-hosted git repository.
guoyp pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/griffin.git
The following commit(s) were added to refs/heads/master by this push:
new 073c6f6 [GRIFFIN-263] Use independent logger for griffin
073c6f6 is described below
commit 073c6f61ea7aaf27e1c7cc5c4729c90f69805afe
Author: wankunde <wa...@163.com>
AuthorDate: Wed Jul 10 17:30:13 2019 +0800
[GRIFFIN-263] Use independent logger for griffin
When we run a DQ job, users can only set the logger level for the Spark execution engine.
```scala
sparkSession.sparkContext.setLogLevel(sparkParam.getLogLevel)
```
We add an independent logger for Griffin, which is very helpful for debugging or understanding the internals of Griffin.
```scala
var logLevel = getGriffinLogLevel()
sparkSession.sparkContext.setLogLevel(sparkParam.getLogLevel)
griffinLogger.setLevel(logLevel)
```
Author: wankunde <wa...@163.com>
Closes #507 from wankunde/loggable.
---
.../main/scala/org/apache/griffin/measure/Loggable.scala | 15 +++++++++++++--
.../apache/griffin/measure/launch/batch/BatchDQApp.scala | 2 ++
.../griffin/measure/launch/streaming/StreamingDQApp.scala | 2 ++
3 files changed, 17 insertions(+), 2 deletions(-)
diff --git a/measure/src/main/scala/org/apache/griffin/measure/Loggable.scala b/measure/src/main/scala/org/apache/griffin/measure/Loggable.scala
index 2e113ab..558eb6d 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/Loggable.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/Loggable.scala
@@ -18,11 +18,22 @@ under the License.
*/
package org.apache.griffin.measure
-import org.slf4j.LoggerFactory
+import org.apache.log4j.Level
+import org.apache.log4j.Logger
trait Loggable {
- @transient private lazy val logger = LoggerFactory.getLogger(getClass)
+ @transient private lazy val logger = Logger.getLogger(getClass)
+
+ @transient protected lazy val griffinLogger = Logger.getLogger("org.apache.griffin")
+
+ def getGriffinLogLevel(): Level = {
+ var logger = griffinLogger
+ while (logger != null && logger.getLevel == null) {
+ logger = logger.getParent.asInstanceOf[Logger]
+ }
+ logger.getLevel
+ }
protected def info(msg: String): Unit = {
logger.info(msg)
diff --git a/measure/src/main/scala/org/apache/griffin/measure/launch/batch/BatchDQApp.scala b/measure/src/main/scala/org/apache/griffin/measure/launch/batch/BatchDQApp.scala
index 97bffdd..fe03508 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/launch/batch/BatchDQApp.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/launch/batch/BatchDQApp.scala
@@ -57,7 +57,9 @@ case class BatchDQApp(allParam: GriffinConfig) extends DQApp {
conf.setAll(sparkParam.getConfig)
conf.set("spark.sql.crossJoin.enabled", "true")
sparkSession = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
+ var logLevel = getGriffinLogLevel()
sparkSession.sparkContext.setLogLevel(sparkParam.getLogLevel)
+ griffinLogger.setLevel(logLevel)
sqlContext = sparkSession.sqlContext
// register udf
diff --git a/measure/src/main/scala/org/apache/griffin/measure/launch/streaming/StreamingDQApp.scala b/measure/src/main/scala/org/apache/griffin/measure/launch/streaming/StreamingDQApp.scala
index 0de8980..502f8cb 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/launch/streaming/StreamingDQApp.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/launch/streaming/StreamingDQApp.scala
@@ -63,7 +63,9 @@ case class StreamingDQApp(allParam: GriffinConfig) extends DQApp {
conf.setAll(sparkParam.getConfig)
conf.set("spark.sql.crossJoin.enabled", "true")
sparkSession = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
+ var logLevel = getGriffinLogLevel()
sparkSession.sparkContext.setLogLevel(sparkParam.getLogLevel)
+ griffinLogger.setLevel(logLevel)
sqlContext = sparkSession.sqlContext
// clear checkpoint directory