You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@griffin.apache.org by gu...@apache.org on 2024/01/20 00:45:19 UTC
(griffin) branch griffin-0.7.0-rc0 updated: 添加错误堆栈信息输出 (Add error stack trace output) (#639)
This is an automated email from the ASF dual-hosted git repository.
guoyp pushed a commit to branch griffin-0.7.0-rc0
in repository https://gitbox.apache.org/repos/asf/griffin.git
The following commit(s) were added to refs/heads/griffin-0.7.0-rc0 by this push:
new f814e8f9 添加错误堆栈信息输出 (#639)
f814e8f9 is described below
commit f814e8f907b49c8a81d6336f8f201bcb1ceaeab7
Author: neo <63...@qq.com>
AuthorDate: Sat Jan 20 08:45:14 2024 +0800
添加错误堆栈信息输出 (Add error stack trace output) (#639)
* 添加错误堆栈信息输出
* 添加错误堆栈信息输出
---------
Co-authored-by: guoqiang04 <gu...@baidu.com>
---
measure/src/main/resources/env-batch.json | 12 +------
measure/src/main/resources/mysql.json | 41 ++++++++++++++++++++++
.../connector/batch/JDBCBasedDataConnector.scala | 2 +-
3 files changed, 43 insertions(+), 12 deletions(-)
diff --git a/measure/src/main/resources/env-batch.json b/measure/src/main/resources/env-batch.json
index bbec4e5c..41451dde 100644
--- a/measure/src/main/resources/env-batch.json
+++ b/measure/src/main/resources/env-batch.json
@@ -17,20 +17,10 @@
"name": "hdfsSink",
"type": "HDFS",
"config": {
- "path": "hdfs://localhost/griffin/batch/persist",
+ "path": "hdfs://localhost:9000/griffin/batch/persist",
"max.persist.lines": 10000,
"max.lines.per.file": 10000
}
- },
- {
- "name": "elasticSink",
- "type": "ELASTICSEARCH",
- "config": {
- "method": "post",
- "api": "http://localhost:9200/griffin/accuracy",
- "connection.timeout": "1m",
- "retry": 10
- }
}
],
"griffin.checkpoint": []
diff --git a/measure/src/main/resources/mysql.json b/measure/src/main/resources/mysql.json
new file mode 100644
index 00000000..09d14149
--- /dev/null
+++ b/measure/src/main/resources/mysql.json
@@ -0,0 +1,41 @@
+{
+ "name":"prof_batch",
+ "process.type":"BATCH",
+ "data.sources":[
+ {
+ "name":"src",
+ "baseline": true,
+ "connector":{
+ "type":"jdbc",
+ "config":{
+ "database":"test1",
+ "tablename":"student",
+ "url":"jdbc:mysql://localhost:3306/test",
+ "user":"test",
+ "password":"123456",
+ "driver":"com.mysql.jdbc.Driver",
+ "where":""
+ }
+ }
+ }
+ ],
+ "evaluate.rule":{
+ "rules":[
+ {
+ "dsl.type":"griffin-dsl",
+ "dq.type":"PROFILING",
+ "out.dataframe.name":"prof",
+ "rule":"src.id where src.age > 20",
+ "out":[
+ {
+ "type":"metric",
+ "name":"prof"
+ }
+ ]
+ }
+ ]
+ },
+ "sinks":[
+ "CONSOLESink"
+ ]
+}
\ No newline at end of file
diff --git a/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/JDBCBasedDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/JDBCBasedDataConnector.scala
index 9fc21d72..123621d4 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/JDBCBasedDataConnector.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/datasource/connector/batch/JDBCBasedDataConnector.scala
@@ -80,7 +80,7 @@ case class JDBCBasedDataConnector(
dfOpt match {
case Success(_) =>
case Failure(exception) =>
- griffinLogger.error("Error occurred while reading data set.", exception)
+ error("Error occurred while reading data set.", exception)
}
val preDfOpt = preProcess(dfOpt.toOption, ms)