Posted to commits@griffin.apache.org by gu...@apache.org on 2017/09/30 08:35:16 UTC

[01/11] incubator-griffin git commit: Dsl modify

Repository: incubator-griffin
Updated Branches:
  refs/heads/master ac8351f0c -> 4aa6f7799


http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/rule/RuleAnalyzerTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/rule/RuleAnalyzerTest.scala b/measure/src/test/scala/org/apache/griffin/measure/rule/RuleAnalyzerTest.scala
deleted file mode 100644
index 47b36e8..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/rule/RuleAnalyzerTest.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.apache.griffin.measure.config.params.user.EvaluateRuleParam
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-
-@RunWith(classOf[JUnitRunner])
-class RuleAnalyzerTest extends FunSuite with BeforeAndAfter with Matchers {
-
-  test ("rule analyze") {
-    val rule = "$source.name = $target.name AND $source.age = $target.age + (2 * 5) AND $source.born > (6 - 2 * 2)"
-    val evaluateRuleParam = EvaluateRuleParam(1.0, rule)
-    val ruleFactory = RuleFactory(evaluateRuleParam)
-    val statement = ruleFactory.generateRule
-
-    val ruleAnalyzer = RuleAnalyzer(statement)
-
-    ruleAnalyzer.constCacheExprs.map(_.desc) should be (List[String]("2 * 5", "2 * 2", "6 - 2 * 2"))
-    ruleAnalyzer.constFinalCacheExprs.map(_.desc) should be (Set[String]("2 * 5", "6 - 2 * 2"))
-
-    ruleAnalyzer.sourceRuleExprs.groupbyExprs.map(_.desc) should be (List[String](
-      "$source['name']", "$source['age']"))
-    ruleAnalyzer.sourceRuleExprs.cacheExprs.map(_.desc) should be (List[String](
-      "$source['name']", "$source['age']", "$source['born']", "$source['born'] > 6 - 2 * 2"))
-    ruleAnalyzer.sourceRuleExprs.finalCacheExprs.map(_.desc) should be (Set[String](
-      "$source['name']", "$source['age']", "$source['born']", "$source['born'] > 6 - 2 * 2"))
-    ruleAnalyzer.sourceRuleExprs.persistExprs.map(_.desc) should be (List[String](
-      "$source['name']", "$source['age']", "$source['born']"))
-
-    ruleAnalyzer.targetRuleExprs.groupbyExprs.map(_.desc) should be (List[String](
-      "$target['name']", "$target['age'] + 2 * 5"))
-    ruleAnalyzer.targetRuleExprs.cacheExprs.map(_.desc) should be (List[String](
-      "$target['name']", "$target['age']", "$target['age'] + 2 * 5"))
-    ruleAnalyzer.targetRuleExprs.finalCacheExprs.map(_.desc) should be (Set[String](
-      "$target['name']", "$target['age']", "$target['age'] + 2 * 5"))
-    ruleAnalyzer.targetRuleExprs.persistExprs.map(_.desc) should be (List[String](
-      "$target['name']", "$target['age']"))
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/rule/RuleFactoryTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/rule/RuleFactoryTest.scala b/measure/src/test/scala/org/apache/griffin/measure/rule/RuleFactoryTest.scala
deleted file mode 100644
index c14cd04..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/rule/RuleFactoryTest.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.apache.griffin.measure.config.params.user.EvaluateRuleParam
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-
-@RunWith(classOf[JUnitRunner])
-class RuleFactoryTest extends FunSuite with BeforeAndAfter with Matchers {
-
-  test ("generate rule") {
-    val rule = "$source.name = $target.name AND $source.age = $target.age"
-    val evaluateRuleParam = EvaluateRuleParam(1.0, rule)
-    val ruleFactory = RuleFactory(evaluateRuleParam)
-    ruleFactory.generateRule.desc should be ("$source['name'] = $target['name'] AND $source['age'] = $target['age']")
-
-    val wrong_rule = "$source.name = $target.name AND $source.age = $target1.age"
-    val evaluateRuleParam1 = EvaluateRuleParam(1.0, wrong_rule)
-    val ruleFactory1 = RuleFactory(evaluateRuleParam1)
-    val thrown = intercept[Exception] {
-      ruleFactory1.generateRule
-    }
-    thrown.getMessage should be ("parse rule error!")
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/rule/RuleParserTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/rule/RuleParserTest.scala b/measure/src/test/scala/org/apache/griffin/measure/rule/RuleParserTest.scala
deleted file mode 100644
index 1d15375..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/rule/RuleParserTest.scala
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-//import org.scalatest.FlatSpec
-//import org.scalamock.scalatest.MockFactory
-
-@RunWith(classOf[JUnitRunner])
-class RuleParserTest extends FunSuite with Matchers with BeforeAndAfter {
-
-  val ruleParser = RuleParser()
-
-  test ("literal number") {
-    val rule1 = "123"
-    val result1 = ruleParser.parseAll(ruleParser.literal, rule1)
-    result1.successful should be (true)
-    result1.get.value should be (Some(123))
-
-    val rule2 = "12.3"
-    val result2 = ruleParser.parseAll(ruleParser.literal, rule2)
-    result2.successful should be (true)
-    result2.get.value should be (Some(12.3))
-  }
-
-  test ("literal string") {
-    val rule1 = "'123'"
-    val result1 = ruleParser.parseAll(ruleParser.literal, rule1)
-    result1.successful should be (true)
-    result1.get.value should be (Some("123"))
-
-    val rule2 = "\"123\""
-    val result2 = ruleParser.parseAll(ruleParser.literal, rule2)
-    result2.successful should be (true)
-    result2.get.value should be (Some("123"))
-
-    val rule3 = "'1+2-3'"
-    val result3 = ruleParser.parseAll(ruleParser.literal, rule3)
-    result3.successful should be (true)
-    result3.get.value should be (Some("1+2-3"))
-  }
-
-  test ("literal time") {
-    val rule = "3h"
-    val result = ruleParser.parseAll(ruleParser.literal, rule)
-    result.successful should be (true)
-    result.get.value should be (Some(3*3600*1000))
-  }
-
-  test ("literal boolean") {
-    val rule = "true"
-    val result = ruleParser.parseAll(ruleParser.literal, rule)
-    result.successful should be (true)
-    result.get.value should be (Some(true))
-  }
-
-  test ("literal null") {
-    val rule = "null"
-    val result = ruleParser.parseAll(ruleParser.literal, rule)
-    result.successful should be (true)
-    result.get.value should be (Some(null))
-  }
-
-  test ("literal none") {
-    val rule = "none"
-    val result = ruleParser.parseAll(ruleParser.literal, rule)
-    result.successful should be (true)
-    result.get.value should be (None)
-  }
-
-  test ("selection head") {
-    val rule = "$source"
-    val result = ruleParser.parseAll(ruleParser.selectionHead, rule)
-    result.successful should be (true)
-    result.get.head should be ("source")
-  }
-
-  test ("field select") {
-    val rule = ".name"
-    val result = ruleParser.parseAll(ruleParser.selector, rule)
-    result.successful should be (true)
-    result.get.desc should be ("['name']")
-  }
-
-  test ("function operation") {
-    val rule = ".func(1, 'abc', 3 + 4)"
-    val result = ruleParser.parseAll(ruleParser.selector, rule)
-    result.successful should be (true)
-    result.get.desc should be (".func(1, 'abc', 3 + 4)")
-  }
-
-  test ("index field range select") {
-    val rule1 = "['field']"
-    val result1 = ruleParser.parseAll(ruleParser.selector, rule1)
-    result1.successful should be (true)
-    result1.get.desc should be ("['field']")
-
-    val rule2 = "[1, 4]"
-    val result2 = ruleParser.parseAll(ruleParser.selector, rule2)
-    result2.successful should be (true)
-    result2.get.desc should be ("[1, 4]")
-
-    val rule3 = "[1, 'name', 'age', 5, (6, 8)]"
-    val result3 = ruleParser.parseAll(ruleParser.selector, rule3)
-    result3.successful should be (true)
-    result3.get.desc should be ("[1, 'name', 'age', 5, (6, 8)]")
-  }
-
-  test ("index field range") {
-    val rule1 = "(3, 5)"
-    val result1 = ruleParser.parseAll(ruleParser.indexFieldRange, rule1)
-    result1.successful should be (true)
-    result1.get.desc should be ("(3, 5)")
-
-    val rule2 = "'name'"
-    val result2 = ruleParser.parseAll(ruleParser.indexFieldRange, rule2)
-    result2.successful should be (true)
-    result2.get.desc should be ("'name'")
-
-    val rule3 = "*"
-    val result3 = ruleParser.parseAll(ruleParser.indexFieldRange, rule3)
-    result3.successful should be (true)
-    result3.get.desc should be ("*")
-  }
-
-  test ("filter select") {
-    val rule = "['age' > 16]"
-    val result = ruleParser.parseAll(ruleParser.selector, rule)
-    result.successful should be (true)
-    result.get.desc should be ("['age' > 16]")
-  }
-
-  test ("selection") {
-    val rule = "$source['age' > 16].func(1, 'abc')[1, 3, 'name'].time[*]"
-    val result = ruleParser.parseAll(ruleParser.selection, rule)
-    result.successful should be (true)
-    result.get.desc should be ("$source['age' > 16].func(1, 'abc')[1, 3, 'name']['time'][*]")
-  }
-
-  test ("math expr") {
-    val rule = "$source.age * 6 + 4 / 2"
-    val result = ruleParser.parseAll(ruleParser.mathExpr, rule)
-    result.successful should be (true)
-    result.get.desc should be ("$source['age'] * 6 + 4 / 2")
-
-    val rule2 = "'age + 1' / 'vv'"
-    val result2 = ruleParser.parseAll(ruleParser.mathExpr, rule2)
-    result2.successful should be (true)
-    result2.get.desc should be ("'age + 1' / 'vv'")
-    println(result2)
-  }
-
-  test ("range expr") {
-    val rule = "($source.age + 1, $target.age + 3, 40)"
-    val result = ruleParser.parseAll(ruleParser.rangeExpr, rule)
-    result.successful should be (true)
-    result.get.desc should be ("($source['age'] + 1, $target['age'] + 3, 40)")
-  }
-
-  test ("logical expr") {
-    val rule1 = "$source.age + 1 = $target.age"
-    val result1 = ruleParser.parseAll(ruleParser.logicalExpr, rule1)
-    result1.successful should be (true)
-    result1.get.desc should be ("$source['age'] + 1 = $target['age']")
-
-    val rule2 = "$source.age in (3, 5, 6, 10)"
-    val result2 = ruleParser.parseAll(ruleParser.logicalExpr, rule2)
-    result2.successful should be (true)
-    result2.get.desc should be ("$source['age'] in (3, 5, 6, 10)")
-  }
-
-  test ("logical statement") {
-    val rule1 = "$source.descs[0] = $target.desc AND $source.name = $target.name"
-    val result1 = ruleParser.parseAll(ruleParser.logicalStatement, rule1)
-    result1.successful should be (true)
-    result1.get.desc should be ("$source['descs'][0] = $target['desc'] AND $source['name'] = $target['name']")
-
-    val rule2 = "NOT $source.age = $target.age"
-    val result2 = ruleParser.parseAll(ruleParser.logicalStatement, rule2)
-    result2.successful should be (true)
-    result2.get.desc should be ("NOT $source['age'] = $target['age']")
-  }
-
-  test ("whole rule") {
-    val rule1 = "$source.name = $target.name AND $source.age = $target.age"
-    val result1 = ruleParser.parseAll(ruleParser.rule, rule1)
-    result1.successful should be (true)
-    result1.get.desc should be ("$source['name'] = $target['name'] AND $source['age'] = $target['age']")
-
-    val rule2 = "$source.name = $target.name AND $source.age = $target.age AND $source.id > 1000"
-    val result2 = ruleParser.parseAll(ruleParser.rule, rule2)
-    result2.successful should be (true)
-    result2.get.desc should be ("$source['name'] = $target['name'] AND $source['age'] = $target['age'] AND $source['id'] > 1000")
-  }
-}
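
Note: the deleted parser tests above exercise the old selector-style DSL, while the tests added later in this patch (GriffinDslAdaptorTest, BasicParserTest) use the new SQL-like griffin-dsl. A rough before/after sketch of an equivalent rule, built from the rule strings appearing in this diff (the new form is an assumption based on the expression tests further down):

    // old DSL (removed RuleParserTest):
    val oldRule = "$source.name = $target.name AND $source.age = $target.age"
    // new griffin-dsl style (added BasicParserTest):
    val newRule = "source.name = target.name AND source.age = target.age"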

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/rule/adaptor/GriffinDslAdaptorTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/rule/adaptor/GriffinDslAdaptorTest.scala b/measure/src/test/scala/org/apache/griffin/measure/rule/adaptor/GriffinDslAdaptorTest.scala
new file mode 100644
index 0000000..987a060
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/rule/adaptor/GriffinDslAdaptorTest.scala
@@ -0,0 +1,65 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.adaptor
+
+import org.apache.griffin.measure.process.check.DataChecker
+import org.apache.griffin.measure.utils.JsonUtil
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+import org.scalamock.scalatest.MockFactory
+
+@RunWith(classOf[JUnitRunner])
+class GriffinDslAdaptorTest extends FunSuite with Matchers with BeforeAndAfter with MockFactory {
+
+  test ("profiling groupby") {
+    val adaptor = GriffinDslAdaptor("source" :: Nil, "count" :: Nil, RunPhase)
+
+    val ruleJson =
+      """
+        |{
+        |  "dsl.type": "griffin-dsl",
+        |  "dq.type": "profiling",
+        |  "rule": "source.age, (source.user_id.COUNT() + 1s) as cnt group by source.age having source.desc.count() > 5 or false order by user_id desc, user_name asc limit 5",
+        |  "details": {
+        |    "source": "source",
+        |    "profiling": {
+        |      "name": "prof",
+        |      "persist.type": "metric"
+        |    }
+        |  }
+        |}
+      """.stripMargin
+
+    // rule: Map[String, Any]
+    val rule: Map[String, Any] = JsonUtil.toAnyMap(ruleJson)
+    println(rule)
+
+    val dataCheckerMock = mock[DataChecker]
+    dataCheckerMock.existDataSourceName _ expects ("source") returning (true)
+    RuleAdaptorGroup.dataChecker = dataCheckerMock
+
+    val steps = adaptor.genConcreteRuleStep(rule)
+
+    steps.foreach { step =>
+      println(s"${step.name} [${step.dslType}]: ${step.rule}")
+    }
+  }
+
+}
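
For readers following the rule JSON above: JsonUtil.toAnyMap deserializes it into a nested map before the adaptor generates concrete rule steps. A sketch of the resulting structure (approximate shape only, not the exact runtime types):

    // roughly what JsonUtil.toAnyMap(ruleJson) yields for the test JSON
    Map(
      "dsl.type" -> "griffin-dsl",
      "dq.type"  -> "profiling",
      "rule"     -> "source.age, (source.user_id.COUNT() + 1s) as cnt group by source.age having source.desc.count() > 5 or false order by user_id desc, user_name asc limit 5",
      "details"  -> Map(
        "source" -> "source",
        "profiling" -> Map("name" -> "prof", "persist.type" -> "metric")
      )
    )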

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/rule/dsl/parser/BasicParserTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/rule/dsl/parser/BasicParserTest.scala b/measure/src/test/scala/org/apache/griffin/measure/rule/dsl/parser/BasicParserTest.scala
new file mode 100644
index 0000000..d8c9531
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/rule/dsl/parser/BasicParserTest.scala
@@ -0,0 +1,205 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.parser
+
+import org.apache.griffin.measure.rule.dsl.expr._
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+//import org.scalatest.FlatSpec
+//import org.scalamock.scalatest.MockFactory
+
+@RunWith(classOf[JUnitRunner])
+class BasicParserTest extends FunSuite with Matchers with BeforeAndAfter {
+
+  val parser = new BasicParser{
+    val dataSourceNames: Seq[String] = "source" :: "target" :: Nil
+    val functionNames: Seq[String] = "func" :: "get_json_object" :: Nil
+    def rootExpression: Parser[Expr] = expression
+  }
+
+  test("test literal") {
+    val rule1 = """null"""
+    val result1 = parser.parseAll(parser.literal, rule1)
+    result1.successful should be (true)
+    result1.get.desc should be ("NULL")
+
+    val rule2 = """nan"""
+    val result2 = parser.parseAll(parser.literal, rule2)
+    result2.successful should be (true)
+    result2.get.desc should be ("NaN")
+
+    val rule3 = """'test\'ing'"""
+    val result3 = parser.parseAll(parser.literal, rule3)
+    result3.successful should be (true)
+    result3.get.desc should be ("'test\\'ing'")
+
+    val rule4 = """"test\" again""""
+    val result4 = parser.parseAll(parser.literal, rule4)
+    result4.successful should be (true)
+    result4.get.desc should be ("\"test\\\" again\"")
+
+    val rule5 = """-1.342"""
+    val result5 = parser.parseAll(parser.literal, rule5)
+    result5.successful should be (true)
+    result5.get.desc should be ("-1.342")
+
+    val rule51 = """33"""
+    val result51 = parser.parseAll(parser.literal, rule51)
+    result51.successful should be (true)
+    result51.get.desc should be ("33")
+
+    val rule6 = """2h"""
+    val result6 = parser.parseAll(parser.literal, rule6)
+    result6.successful should be (true)
+    result6.get.desc should be (s"${2 * 3600 * 1000}")
+
+    val rule7 = """true"""
+    val result7 = parser.parseAll(parser.literal, rule7)
+    result7.successful should be (true)
+    result7.get.desc should be ("true")
+  }
+
+  test ("test selection") {
+    val rule1 = """source"""
+    val result1 = parser.parseAll(parser.selection, rule1)
+    result1.successful should be (true)
+    result1.get.desc should be ("source")
+
+    val rule2 = """source_not_registered"""
+    val result2 = parser.parseAll(parser.selection, rule2)
+    result2.successful should be (false)
+
+    val rule3 = """source[12].age"""
+    val result3 = parser.parseAll(parser.selection, rule3)
+    result3.successful should be (true)
+    result3.get.desc should be ("source[12].age")
+    result3.get.alias should be (Some("age"))
+
+    val rule4 = """source.name.func(target.name)"""
+    val result4 = parser.parseAll(parser.selection, rule4)
+    result4.successful should be (true)
+    result4.get.desc should be ("func(source.name, target.name)")
+  }
+
+  test ("test math") {
+    val rule1 = """-1"""
+    val result1 = parser.parseAll(parser.mathExpression, rule1)
+    result1.successful should be (true)
+    result1.get.desc should be ("(-1)")
+
+    val rule2 = "1 + 1"
+    val result2 = parser.parseAll(parser.mathExpression, rule2)
+    result2.successful should be (true)
+    result2.get.desc should be ("1 + 1")
+
+    val rule3 = "source.age + 2 * 5 + target.offset"
+    val result3 = parser.parseAll(parser.mathExpression, rule3)
+    result3.successful should be (true)
+    result3.get.desc should be ("source.age + 2 * 5 + target.offset")
+
+    val rule4 = "(source.age + 2) * (5 + target.offset)"
+    val result4 = parser.parseAll(parser.mathExpression, rule4)
+    result4.successful should be (true)
+    result4.get.desc should be ("(source.age + 2) * (5 + target.offset)")
+  }
+
+  test ("test logical") {
+    val rule1 = "source.age in (12 + 3, 23, 34)"
+    val result1 = parser.parseAll(parser.logicalExpression, rule1)
+    result1.successful should be (true)
+    result1.get.desc should be ("source.age IN (12 + 3, 23, 34)")
+
+    val rule2 = "source.age between (12 + 3, 23, 34)"
+    val result2 = parser.parseAll(parser.logicalExpression, rule2)
+    result2.successful should be (true)
+    result2.get.desc should be ("source.age BETWEEN 12 + 3 AND 23")
+
+    val rule3 = "source.age between (12 + 3)"
+    assertThrows[Exception](parser.parseAll(parser.logicalExpression, rule3))
+
+    val rule4 = "source.name like '%tk'"
+    val result4 = parser.parseAll(parser.logicalExpression, rule4)
+    result4.successful should be (true)
+    result4.get.desc should be ("source.name LIKE '%tk'")
+
+    val rule5 = "source.desc is not null"
+    val result5 = parser.parseAll(parser.logicalExpression, rule5)
+    result5.successful should be (true)
+    result5.get.desc should be ("source.desc IS NOT NULL")
+
+    val rule6 = "source.desc is not nan"
+    val result6 = parser.parseAll(parser.logicalExpression, rule6)
+    result6.successful should be (true)
+    result6.get.desc should be ("NOT isnan(source.desc)")
+
+    val rule7 = "!source.ok and source.name = target.name && (source.age between 12 and 52) && target.desc is not null"
+    val result7 = parser.parseAll(parser.logicalExpression, rule7)
+    result7.successful should be (true)
+    result7.get.desc should be ("(NOT source.ok) AND source.name = target.name AND (source.age BETWEEN 12 AND 52) AND target.desc IS NOT NULL")
+
+    val rule8 = "!(10 != 30 and !(31 > 2) or (45 <= 8 and 33 <> 0))"
+    val result8 = parser.parseAll(parser.logicalExpression, rule8)
+    result8.successful should be (true)
+    result8.get.desc should be ("(NOT (10 != 30 AND (NOT (31 > 2)) OR (45 <= 8 AND 33 <> 0)))")
+
+  }
+
+  test ("test expression") {
+    val rule3 = "source.age + 2 * 5 + target.offset"
+    val result3 = parser.parseAll(parser.expression, rule3)
+    result3.successful should be (true)
+    result3.get.desc should be ("source.age + 2 * 5 + target.offset")
+
+    val rule4 = "(source.age + 2) * (5 + target.offset)"
+    val result4 = parser.parseAll(parser.expression, rule4)
+    result4.successful should be (true)
+    result4.get.desc should be ("(source.age + 2) * (5 + target.offset)")
+
+    val rule7 = "!source.ok and source.name = target.name && (source.age between 12 and 52) && target.desc is not null"
+    val result7 = parser.parseAll(parser.expression, rule7)
+    result7.successful should be (true)
+    result7.get.desc should be ("(NOT source.ok) AND source.name = target.name AND (source.age BETWEEN 12 AND 52) AND target.desc IS NOT NULL")
+
+    val rule8 = "!(10 != 30 and !(31 > 2) or (45 <= 8 and 33 <> 0))"
+    val result8 = parser.parseAll(parser.expression, rule8)
+    result8.successful should be (true)
+    result8.get.desc should be ("(NOT (10 != 30 AND (NOT (31 > 2)) OR (45 <= 8 AND 33 <> 0)))")
+
+    val rule1 = "source.user_id = target.user_id AND source.first_name = target.first_name AND source.last_name = target.last_name AND source.address = target.address AND source.email = target.email AND source.phone = target.phone AND source.post_code = target.post_code"
+    val result1 = parser.parseAll(parser.expression, rule1)
+    result1.successful should be (true)
+    result1.get.desc should be ("source.user_id = target.user_id AND source.first_name = target.first_name AND source.last_name = target.last_name AND source.address = target.address AND source.email = target.email AND source.phone = target.phone AND source.post_code = target.post_code")
+  }
+
+  test ("test function") {
+    val rule3 = "source.age + 2 * 5 + target.offset * func('a', source.name)"
+    val result3 = parser.parseAll(parser.expression, rule3)
+    result3.successful should be (true)
+    result3.get.desc should be ("source.age + 2 * 5 + target.offset * func('a', source.name)")
+  }
+
+  test ("order by clause") {
+    val rule = "order by source.user_id, item"
+    val result = parser.parseAll(parser.orderbyClause, rule)
+    result.successful should be (true)
+    println(result.get.desc)
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/sql/SqlTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/sql/SqlTest.scala b/measure/src/test/scala/org/apache/griffin/measure/sql/SqlTest.scala
new file mode 100644
index 0000000..7b23062
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/sql/SqlTest.scala
@@ -0,0 +1,125 @@
+//package org.apache.griffin.measure.sql
+//
+//import org.apache.griffin.measure.config.params.user.EvaluateRuleParam
+//import org.apache.griffin.measure.rule.expr.{Expr, StatementExpr}
+//import org.apache.spark.sql.{DataFrame, SQLContext}
+//import org.apache.spark.sql.types.{ArrayType, IntegerType, StructField, StructType}
+//import org.apache.spark.{SparkConf, SparkContext}
+//import org.junit.runner.RunWith
+//import org.scalatest.junit.JUnitRunner
+//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+//
+//@RunWith(classOf[JUnitRunner])
+//class SqlTest extends FunSuite with BeforeAndAfter with Matchers {
+//
+//  var sc: SparkContext = _
+//  var sqlContext: SQLContext = _
+//
+//  before {
+//    val conf = new SparkConf().setMaster("local[*]").setAppName("test")
+//    sc = new SparkContext(conf)
+//    sqlContext = new SQLContext(sc)
+//  }
+//
+//  test ("spark sql") {
+//
+//    val squared = (s: Int) => {
+//      s * s
+//    }
+//    sqlContext.udf.register("square", squared)
+//
+//    val a = sqlContext.range(1, 20)
+//    a.show
+//
+//    a.registerTempTable("test")
+//
+//    val table = sqlContext.sql("select * from test")
+//    table.show()
+//
+//    val result = sqlContext.sql("select id, square(id) as id_squared from test")
+//    result.show()
+//
+//  }
+//
+//  test ("json") {
+//    def jsonToDataFrame(json: String, schema: Option[StructType] = None): DataFrame = {
+//      val reader = sqlContext.read
+//      val rd = schema match {
+//        case Some(scm) => reader.schema(scm)
+//        case _ => reader
+//      }
+//      rd.json(sc.parallelize(json :: Nil))
+//    }
+//
+//    val json =
+//      """
+//        |{
+//        |  "a": [
+//        |     1, 2, 3
+//        |  ]
+//        |}
+//      """.stripMargin
+//
+////    val bt = StructField("b", IntegerType)
+////    val at = StructField("a", StructType(bt :: Nil))
+////    val schema = StructType(at :: Nil)
+//
+//    val at = StructField("a", ArrayType(IntegerType))
+//    val schema = StructType(at :: Nil)
+//
+//    val df = jsonToDataFrame(json, Some(schema))
+//
+//    df.registerTempTable("json")
+//
+//    val result = sqlContext.sql("select a[1] from json")
+//    result.show
+//
+//  }
+//
+//  test ("json file") {
+//
+//    // read json file directly
+////    val filePath = "src/test/resources/test-data.jsonFile"
+////    val reader = sqlContext.read
+////    val df = reader.json(filePath)
+////    df.show
+////
+////    df.registerTempTable("ttt")
+////    val result = sqlContext.sql("select * from ttt where list[0].c = 11")
+////    result.show
+//
+//    // whole json file
+////    val filePath = "src/test/resources/test-data0.json"
+//////    val filePath = "hdfs://localhost/test/file/t1.json"
+////    val jsonRDD = sc.wholeTextFiles(s"${filePath},${filePath}").map(x => x._2)
+////    val namesJson = sqlContext.read.json(jsonRDD)
+////    namesJson.printSchema
+////    namesJson.show
+//
+//    // read text file then convert to json
+//    val filePath = "src/test/resources/test-data.jsonFile"
+//    val rdd = sc.textFile(filePath)
+//    val reader = sqlContext.read
+//    val df = reader.json(rdd)
+//    df.show
+//    df.printSchema
+//
+//    df.registerTempTable("ttt")
+//    val result = sqlContext.sql("select * from ttt where list[0].c = 11")
+//    result.show
+//
+//    // udf
+//    val slice = (arr: Seq[Long], f: Int, e: Int) => arr.slice(f, e)
+////    val slice = (arr: Seq[Long]) => arr.slice(0, 1)
+//    sqlContext.udf.register("slice", slice)
+//
+//    val result1 = sqlContext.sql("select slice(t, 0, 2) from ttt")
+//    result1.show
+//
+//  }
+//
+//  test ("accu sql") {
+////    val file1 =
+//  }
+//
+//}
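
The SqlTest file above is added fully commented out. A minimal uncommented version of its UDF check, assuming the same Spark 1.x SQLContext API used in the commented imports, would look roughly like this:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    object SqlUdfSketch {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("test"))
        val sqlContext = new SQLContext(sc)

        // register a simple UDF and use it in a query, as the commented-out test does
        sqlContext.udf.register("square", (s: Int) => s * s)
        sqlContext.range(1, 20).registerTempTable("test")
        sqlContext.sql("select id, square(id) as id_squared from test").show()

        sc.stop()
      }
    }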

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/utils/HdfsUtilTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/utils/HdfsUtilTest.scala b/measure/src/test/scala/org/apache/griffin/measure/utils/HdfsUtilTest.scala
new file mode 100644
index 0000000..90db32a
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/utils/HdfsUtilTest.scala
@@ -0,0 +1,132 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.utils
+
+import java.io.{BufferedReader, FileReader, InputStreamReader}
+
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.{FileSystem, Path}
+import org.apache.spark.sql.SQLContext
+import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.{SparkConf, SparkContext}
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+
+@RunWith(classOf[JUnitRunner])
+class HdfsUtilTest extends FunSuite with Matchers with BeforeAndAfter {
+
+  private val separator = "/"
+
+  private val conf1 = new Configuration()
+  conf1.addResource(new Path("file:///apache/hadoop/etc/hadoop/core-site.xml"))
+  conf1.addResource(new Path("file:///apache/hadoop/etc/hadoop/hdfs-site.xml"))
+  private val dfs1 = FileSystem.get(conf1)
+
+  private val conf2 = new Configuration()
+  conf2.addResource(new Path("file:///Users/lliu13/test/hadoop/core-site.xml"))
+  conf2.addResource(new Path("file:///Users/lliu13/test/hadoop/hdfs-site.xml"))
+  private val dfs2 = FileSystem.get(conf2)
+
+  val conf = new SparkConf().setAppName("test_hdfs").setMaster("local[*]")
+  val sparkContext = new SparkContext(conf)
+  sparkContext.setLogLevel("WARN")
+  val sqlContext = new HiveContext(sparkContext)
+
+  def listSubPaths(dfs: FileSystem, dirPath: String, subType: String, fullPath: Boolean = false): Iterable[String] = {
+    val path = new Path(dirPath)
+    try {
+      val fileStatusArray = dfs.listStatus(path)
+      fileStatusArray.filter { fileStatus =>
+        subType match {
+          case "dir" => fileStatus.isDirectory
+          case "file" => fileStatus.isFile
+          case _ => true
+        }
+      }.map { fileStatus =>
+        val fname = fileStatus.getPath.getName
+        if (fullPath) getHdfsFilePath(dirPath, fname) else fname
+      }
+    } catch {
+      case e: Throwable => {
+        println(s"list path files error: ${e.getMessage}")
+        Nil
+      }
+    }
+  }
+
+  def getHdfsFilePath(parentPath: String, fileName: String): String = {
+    if (parentPath.endsWith(separator)) parentPath + fileName else parentPath + separator + fileName
+  }
+
+  test ("test multiple hdfs") {
+    val list1 = listSubPaths(dfs1, "/", "dir", false)
+    println(list1)
+
+    val list2 = listSubPaths(dfs2, "/", "dir", false)
+    println(list2)
+
+    val path1 = "/depth/discovery_file_sample.txt"
+    val istream1 = dfs1.open(new Path(path1))
+    val reader1 = new BufferedReader(new InputStreamReader(istream1))
+    val seq1 = scala.collection.mutable.MutableList[String]()
+    try {
+      var line = reader1.readLine()
+      while (line != null) {
+        val arr = line.split("\u0007")
+        seq1 ++= arr
+        line = reader1.readLine()
+      }
+    } finally {
+      reader1.close()
+      istream1.close()
+    }
+
+//    val scanner = new java.util.Scanner(istream1,"UTF-8").useDelimiter("\u0007")
+//    val theString = if (scanner.hasNext()) scanner.next() else ""
+//    println(theString)
+//    scanner.close()
+
+    println(seq1.size)
+    println(seq1.take(10))
+    seq1.take(10).foreach(println)
+
+//    val path2 = "/griffin/json/env.json"
+//    val istream2 = dfs2.open(new Path(path2))
+//    val reader2 = new BufferedReader(new InputStreamReader(istream2))
+//    val seq2 = scala.collection.mutable.MutableList[String]()
+//    try {
+//      var line = reader2.readLine()
+//      while (line != null) {
+//        line = reader2.readLine()
+//        seq2 += line
+//      }
+//    } catch {
+//      case e: Throwable => {
+//        println("error in reading")
+//      }
+//    } finally {
+//      reader2.close()
+//      istream2.close()
+//    }
+//    println(seq2.size)
+//    println(seq2.take(10))
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala b/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala
index 4daebb6..233d78c 100644
--- a/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala
+++ b/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala
@@ -1,60 +1,60 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.utils
-
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-
-
-@RunWith(classOf[JUnitRunner])
-class JsonUtilTest extends FunSuite with Matchers with BeforeAndAfter {
-
-  val map = Map[String, Any](("name" -> "test"), ("age" -> 15))
-  val json = """{"name":"test","age":15}"""
-
-  val person = JsonUtilTest.Person("test", 15)
-
-  test ("toJson 1") {
-    val symbolMap = map.map(p => (Symbol(p._1), p._2))
-    JsonUtil.toJson(symbolMap) should equal (json)
-  }
-
-  test ("toJson 2") {
-    JsonUtil.toJson(map) should equal (json)
-  }
-
-  test ("toMap") {
-    JsonUtil.toMap(json) should equal (map)
-  }
-
-  test ("fromJson 1") {
-    JsonUtil.fromJson[JsonUtilTest.Person](json) should equal (person)
-  }
-
-  test ("fromJson 2") {
-    val is = new java.io.ByteArrayInputStream(json.getBytes("utf-8"));
-    JsonUtil.fromJson[JsonUtilTest.Person](is) should equal (person)
-  }
-
-}
-
-object JsonUtilTest {
-  case class Person(name: String, age: Int){}
-}
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.utils
+//
+//import org.junit.runner.RunWith
+//import org.scalatest.junit.JUnitRunner
+//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+//
+//
+//@RunWith(classOf[JUnitRunner])
+//class JsonUtilTest extends FunSuite with Matchers with BeforeAndAfter {
+//
+//  val map = Map[String, Any](("name" -> "test"), ("age" -> 15))
+//  val json = """{"name":"test","age":15}"""
+//
+//  val person = JsonUtilTest.Person("test", 15)
+//
+//  test ("toJson 1") {
+//    val symbolMap = map.map(p => (Symbol(p._1), p._2))
+//    JsonUtil.toJson(symbolMap) should equal (json)
+//  }
+//
+//  test ("toJson 2") {
+//    JsonUtil.toJson(map) should equal (json)
+//  }
+//
+//  test ("toMap") {
+//    JsonUtil.toMap(json) should equal (map)
+//  }
+//
+//  test ("fromJson 1") {
+//    JsonUtil.fromJson[JsonUtilTest.Person](json) should equal (person)
+//  }
+//
+//  test ("fromJson 2") {
+//    val is = new java.io.ByteArrayInputStream(json.getBytes("utf-8"));
+//    JsonUtil.fromJson[JsonUtilTest.Person](is) should equal (person)
+//  }
+//
+//}
+//
+//object JsonUtilTest {
+//  case class Person(name: String, age: Int){}
+//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/utils/ParamUtilTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/utils/ParamUtilTest.scala b/measure/src/test/scala/org/apache/griffin/measure/utils/ParamUtilTest.scala
new file mode 100644
index 0000000..5a54b11
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/utils/ParamUtilTest.scala
@@ -0,0 +1,50 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.utils
+
+import java.io.{BufferedReader, InputStreamReader}
+
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.{FileSystem, Path}
+import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.{SparkConf, SparkContext}
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+
+@RunWith(classOf[JUnitRunner])
+class ParamUtilTest extends FunSuite with Matchers with BeforeAndAfter {
+
+  test ("test param util") {
+    val params = Map[String, Any](
+      ("a" -> "321"),
+      ("b" -> 123),
+      ("c" -> 3.2),
+      ("d" -> (213 :: 321 :: Nil))
+    )
+
+    import ParamUtil._
+
+    params.getString("a", "") should be ("321")
+    params.getInt("b", 0) should be (123)
+    params.getBoolean("c", false) should be (false)
+    params.getAnyRef("d", List[Int]()) should be ((213 :: 321 :: Nil))
+  }
+
+}
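
ParamUtil itself is not part of this diff; the test relies on it enriching Map[String, Any] with typed getters via import ParamUtil._. A minimal sketch of that enrichment pattern (an assumption about the shape, not Griffin's actual implementation):

    object ParamUtilSketch {
      // hypothetical stand-in for org.apache.griffin.measure.utils.ParamUtil
      implicit class ParamMap(params: Map[String, Any]) {
        def getString(key: String, default: String): String =
          params.get(key).map(_.toString).getOrElse(default)
        def getInt(key: String, default: Int): Int = params.get(key) match {
          case Some(v: Int) => v
          case Some(v) => try v.toString.toInt catch { case _: Throwable => default }
          case None => default
        }
      }
    }
    // usage: import ParamUtilSketch._ ; Map[String, Any]("b" -> 123).getInt("b", 0)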



[07/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/CalculationUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/CalculationUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/CalculationUtil.scala
deleted file mode 100644
index c969012..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/CalculationUtil.scala
+++ /dev/null
@@ -1,315 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import scala.util.{Success, Try}
-
-
-object CalculationUtil {
-
-  implicit def option2CalculationValue(v: Option[_]): CalculationValue = CalculationValue(v)
-
-  // redefine the calculation method of operators in DSL
-  case class CalculationValue(value: Option[_]) extends Serializable {
-
-    def + (other: Option[_]): Option[_] = {
-      Try {
-        (value, other) match {
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(v1: String), Some(v2)) => Some(v1 + v2.toString)
-          case (Some(v1: Byte), Some(v2)) => Some(v1 + v2.toString.toByte)
-          case (Some(v1: Short), Some(v2)) => Some(v1 + v2.toString.toShort)
-          case (Some(v1: Int), Some(v2)) => Some(v1 + v2.toString.toInt)
-          case (Some(v1: Long), Some(v2)) => Some(v1 + v2.toString.toLong)
-          case (Some(v1: Float), Some(v2)) => Some(v1 + v2.toString.toFloat)
-          case (Some(v1: Double), Some(v2)) => Some(v1 + v2.toString.toDouble)
-          case _ => value
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-    def - (other: Option[_]): Option[_] = {
-      Try {
-        (value, other) match {
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(v1: Byte), Some(v2)) => Some(v1 - v2.toString.toByte)
-          case (Some(v1: Short), Some(v2)) => Some(v1 - v2.toString.toShort)
-          case (Some(v1: Int), Some(v2)) => Some(v1 - v2.toString.toInt)
-          case (Some(v1: Long), Some(v2)) => Some(v1 - v2.toString.toLong)
-          case (Some(v1: Float), Some(v2)) => Some(v1 - v2.toString.toFloat)
-          case (Some(v1: Double), Some(v2)) => Some(v1 - v2.toString.toDouble)
-          case _ => value
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-    def * (other: Option[_]): Option[_] = {
-      Try {
-        (value, other) match {
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(s1: String), Some(n2: Int)) => Some(s1 * n2)
-          case (Some(s1: String), Some(n2: Long)) => Some(s1 * n2.toInt)
-          case (Some(v1: Byte), Some(v2)) => Some(v1 * v2.toString.toByte)
-          case (Some(v1: Short), Some(v2)) => Some(v1 * v2.toString.toShort)
-          case (Some(v1: Int), Some(v2)) => Some(v1 * v2.toString.toInt)
-          case (Some(v1: Long), Some(v2)) => Some(v1 * v2.toString.toLong)
-          case (Some(v1: Float), Some(v2)) => Some(v1 * v2.toString.toFloat)
-          case (Some(v1: Double), Some(v2)) => Some(v1 * v2.toString.toDouble)
-          case _ => value
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-    def / (other: Option[_]): Option[_] = {
-      Try {
-        (value, other) match {
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(v1: Byte), Some(v2)) => Some(v1 / v2.toString.toByte)
-          case (Some(v1: Short), Some(v2)) => Some(v1 / v2.toString.toShort)
-          case (Some(v1: Int), Some(v2)) => Some(v1 / v2.toString.toInt)
-          case (Some(v1: Long), Some(v2)) => Some(v1 / v2.toString.toLong)
-          case (Some(v1: Float), Some(v2)) => Some(v1 / v2.toString.toFloat)
-          case (Some(v1: Double), Some(v2)) => Some(v1 / v2.toString.toDouble)
-          case _ => value
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-    def % (other: Option[_]): Option[_] = {
-      Try {
-        (value, other) match {
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(v1: Byte), Some(v2)) => Some(v1 % v2.toString.toByte)
-          case (Some(v1: Short), Some(v2)) => Some(v1 % v2.toString.toShort)
-          case (Some(v1: Int), Some(v2)) => Some(v1 % v2.toString.toInt)
-          case (Some(v1: Long), Some(v2)) => Some(v1 % v2.toString.toLong)
-          case (Some(v1: Float), Some(v2)) => Some(v1 % v2.toString.toFloat)
-          case (Some(v1: Double), Some(v2)) => Some(v1 % v2.toString.toDouble)
-          case _ => value
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-    def unary_- (): Option[_] = {
-      value match {
-        case None => None
-        case Some(null) => None
-        case Some(v: String) => Some(v.reverse.toString)
-        case Some(v: Boolean) => Some(!v)
-        case Some(v: Byte) => Some(-v)
-        case Some(v: Short) => Some(-v)
-        case Some(v: Int) => Some(-v)
-        case Some(v: Long) => Some(-v)
-        case Some(v: Float) => Some(-v)
-        case Some(v: Double) => Some(-v)
-        case Some(v) => Some(v)
-        case _ => None
-      }
-    }
-
-
-    def === (other: Option[_]): Option[Boolean] = {
-      (value, other) match {
-        case (None, None) => Some(true)
-        case (Some(v1), Some(v2)) => Some(v1 == v2)
-        case _ => Some(false)
-      }
-    }
-
-    def =!= (other: Option[_]): Option[Boolean] = {
-      (value, other) match {
-        case (None, None) => Some(false)
-        case (Some(v1), Some(v2)) => Some(v1 != v2)
-        case _ => Some(true)
-      }
-    }
-
-    def > (other: Option[_]): Option[Boolean] = {
-      Try {
-        (value, other) match {
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(v1: String), Some(v2: String)) => Some(v1 > v2)
-          case (Some(v1: Byte), Some(v2)) => Some(v1 > v2.toString.toDouble)
-          case (Some(v1: Short), Some(v2)) => Some(v1 > v2.toString.toDouble)
-          case (Some(v1: Int), Some(v2)) => Some(v1 > v2.toString.toDouble)
-          case (Some(v1: Long), Some(v2)) => Some(v1 > v2.toString.toDouble)
-          case (Some(v1: Float), Some(v2)) => Some(v1 > v2.toString.toDouble)
-          case (Some(v1: Double), Some(v2)) => Some(v1 > v2.toString.toDouble)
-          case _ => None
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-    def >= (other: Option[_]): Option[Boolean] = {
-      Try {
-        (value, other) match {
-          case (None, None) | (Some(null), Some(null)) => Some(true)
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(v1: String), Some(v2: String)) => Some(v1 >= v2)
-          case (Some(v1: Byte), Some(v2)) => Some(v1 >= v2.toString.toDouble)
-          case (Some(v1: Short), Some(v2)) => Some(v1 >= v2.toString.toDouble)
-          case (Some(v1: Int), Some(v2)) => Some(v1 >= v2.toString.toDouble)
-          case (Some(v1: Long), Some(v2)) => Some(v1 >= v2.toString.toDouble)
-          case (Some(v1: Float), Some(v2)) => Some(v1 >= v2.toString.toDouble)
-          case (Some(v1: Double), Some(v2)) => Some(v1 >= v2.toString.toDouble)
-          case _ => None
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-    def < (other: Option[_]): Option[Boolean] = {
-      Try {
-        (value, other) match {
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(v1: String), Some(v2: String)) => Some(v1 < v2)
-          case (Some(v1: Byte), Some(v2)) => Some(v1 < v2.toString.toDouble)
-          case (Some(v1: Short), Some(v2)) => Some(v1 < v2.toString.toDouble)
-          case (Some(v1: Int), Some(v2)) => Some(v1 < v2.toString.toDouble)
-          case (Some(v1: Long), Some(v2)) => Some(v1 < v2.toString.toDouble)
-          case (Some(v1: Float), Some(v2)) => Some(v1 < v2.toString.toDouble)
-          case (Some(v1: Double), Some(v2)) => Some(v1 < v2.toString.toDouble)
-          case _ => None
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-    def <= (other: Option[_]): Option[Boolean] = {
-      Try {
-        (value, other) match {
-          case (None, None) | (Some(null), Some(null)) => Some(true)
-          case (None, _) | (_, None) => None
-          case (Some(null), _) | (_, Some(null)) => None
-          case (Some(v1: String), Some(v2: String)) => Some(v1 <= v2)
-          case (Some(v1: Byte), Some(v2)) => Some(v1 <= v2.toString.toDouble)
-          case (Some(v1: Short), Some(v2)) => Some(v1 <= v2.toString.toDouble)
-          case (Some(v1: Int), Some(v2)) => Some(v1 <= v2.toString.toDouble)
-          case (Some(v1: Long), Some(v2)) => Some(v1 <= v2.toString.toDouble)
-          case (Some(v1: Float), Some(v2)) => Some(v1 <= v2.toString.toDouble)
-          case (Some(v1: Double), Some(v2)) => Some(v1 <= v2.toString.toDouble)
-          case _ => None
-        }
-      } match {
-        case Success(opt) => opt
-        case _ => None
-      }
-    }
-
-
-    def in (other: Iterable[Option[_]]): Option[Boolean] = {
-      other.foldLeft(Some(false): Option[Boolean]) { (res, next) =>
-        optOr(res, ===(next))
-      }
-    }
-
-    def not_in (other: Iterable[Option[_]]): Option[Boolean] = {
-      other.foldLeft(Some(true): Option[Boolean]) { (res, next) =>
-        optAnd(res, =!=(next))
-      }
-    }
-
-    def between (other: Iterable[Option[_]]): Option[Boolean] = {
-      if (other.size < 2) None else {
-        val (begin, end) = (other.head, other.tail.head)
-        if (begin.isEmpty && end.isEmpty) Some(value.isEmpty)
-        else optAnd(>=(begin), <=(end))
-      }
-    }
-
-    def not_between (other: Iterable[Option[_]]): Option[Boolean] = {
-      if (other.size < 2) None else {
-        val (begin, end) = (other.head, other.tail.head)
-        if (begin.isEmpty && end.isEmpty) Some(value.nonEmpty)
-        else optOr(<(begin), >(end))
-      }
-    }
-
-    def unary_! (): Option[Boolean] = {
-      optNot(value)
-    }
-
-    def && (other: Option[_]): Option[Boolean] = {
-      optAnd(value, other)
-    }
-
-    def || (other: Option[_]): Option[Boolean] = {
-      optOr(value, other)
-    }
-
-
-    private def optNot(a: Option[_]): Option[Boolean] = {
-      a match {
-        case None => None
-        case Some(null) => None
-        case Some(v: Boolean) => Some(!v)
-        case _ => None
-      }
-    }
-    private def optAnd(a: Option[_], b: Option[_]): Option[Boolean] = {
-      (a, b) match {
-        case (None, _) | (_, None) => None
-        case (Some(null), _) | (_, Some(null)) => None
-        case (Some(false), _) | (_, Some(false)) => Some(false)
-        case (Some(b1: Boolean), Some(b2: Boolean)) => Some(b1 && b2)
-        case _ => None
-      }
-    }
-    private def optOr(a: Option[_], b: Option[_]): Option[Boolean] = {
-      (a, b) match {
-        case (None, _) | (_, None) => None
-        case (Some(null), _) | (_, Some(null)) => None
-        case (Some(true), _) | (_, Some(true)) => Some(true)
-        case (Some(b1: Boolean), Some(b2: Boolean)) => Some(b1 || b2)
-        case _ => None
-      }
-    }
-  }
-
-}
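
The removed CalculationUtil let Option values be combined directly with arithmetic and comparison operators through the implicit conversion above. A small usage sketch against that old code (e.g. in the Scala REPL, with the pre-removal package on the classpath):

    import org.apache.griffin.measure.rule.CalculationUtil._

    val sum  = Some(2) + Some(5)          // Some(7): Int branch of +
    val gap  = Some(2) + None             // None: a missing operand collapses the result
    val cmp  = Some(3) > Some(2)          // Some(true): compared via toString.toDouble
    val both = Some(true) && Some(false)  // Some(false): optAnd yields false when either side is false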

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/DataTypeCalculationUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/DataTypeCalculationUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/DataTypeCalculationUtil.scala
deleted file mode 100644
index 9d027ec..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/DataTypeCalculationUtil.scala
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.apache.spark.sql.types._
-
-object DataTypeCalculationUtil {
-
-  implicit def dataType2CalculationType(tp: DataType): CalculationType = CalculationType(tp)
-
-  case class CalculationType(tp: DataType) extends Serializable {
-    def binaryOpr (other: DataType): DataType = {
-      (tp, other) match {
-        case (NullType, _) | (_, NullType) => NullType
-        case (t, _) => t
-      }
-    }
-    def unaryOpr (): DataType = {
-      tp
-    }
-  }
-
-  case class DataTypeException() extends Exception {}
-
-  def getDataType(value: Any): DataType = {
-    value match {
-      case v: String => StringType
-      case v: Boolean => BooleanType
-      case v: Long => LongType
-      case v: Int => IntegerType
-      case v: Short => ShortType
-      case v: Byte => ByteType
-      case v: Double => DoubleType
-      case v: Float => FloatType
-      case v: Map[_, _] => MapType(getSameDataType(v.keys), getSameDataType(v.values))
-      case v: Iterable[_] => ArrayType(getSameDataType(v))
-      case _ => NullType
-    }
-  }
-
-  private def getSameDataType(values: Iterable[Any]): DataType = {
-    values.foldLeft(NullType: DataType)((a, c) => genericTypeOf(a, getDataType(c)))
-  }
-
-  private def genericTypeOf(dt1: DataType, dt2: DataType): DataType = {
-    if (dt1 == dt2) dt1 else {
-      dt1 match {
-        case NullType => dt2
-        case StringType => dt1
-        case DoubleType => {
-          dt2 match {
-            case StringType => dt2
-            case DoubleType | FloatType | LongType | IntegerType | ShortType | ByteType => dt1
-            case _ => throw DataTypeException()
-          }
-        }
-        case FloatType => {
-          dt2 match {
-            case StringType | DoubleType => dt2
-            case FloatType | LongType | IntegerType | ShortType | ByteType => dt1
-            case _ => throw DataTypeException()
-          }
-        }
-        case LongType => {
-          dt2 match {
-            case StringType | DoubleType | FloatType => dt2
-            case LongType | IntegerType | ShortType | ByteType => dt1
-            case _ => throw DataTypeException()
-          }
-        }
-        case IntegerType => {
-          dt2 match {
-            case StringType | DoubleType | FloatType | LongType => dt2
-            case IntegerType | ShortType | ByteType => dt1
-            case _ => throw DataTypeException()
-          }
-        }
-        case ShortType => {
-          dt2 match {
-            case StringType | DoubleType | FloatType | LongType | IntegerType => dt2
-            case ShortType | ByteType => dt1
-            case _ => throw DataTypeException()
-          }
-        }
-        case ByteType => {
-          dt2 match {
-            case StringType | DoubleType | FloatType | LongType | IntegerType | ShortType => dt2
-            case ByteType => dt1
-            case _ => throw DataTypeException()
-          }
-        }
-        case BooleanType => {
-          dt2 match {
-            case StringType => dt2
-            case BooleanType => dt1
-            case _ => throw DataTypeException()
-          }
-        }
-        case MapType(kdt1, vdt1, _) => {
-          dt2 match {
-            case MapType(kdt2, vdt2, _) => MapType(genericTypeOf(kdt1, kdt2), genericTypeOf(vdt1, vdt2))
-            case _ => throw DataTypeException()
-          }
-        }
-        case ArrayType(vdt1, _) => {
-          dt2 match {
-            case ArrayType(vdt2, _) => ArrayType(genericTypeOf(vdt1, vdt2))
-            case _ => throw DataTypeException()
-          }
-        }
-        case _ => throw DataTypeException()
-      }
-    }
-  }
-
-  def sequenceDataTypeMap(aggr: Map[String, DataType], value: Map[String, Any]): Map[String, DataType] = {
-    val dataTypes = value.foldLeft(Map[String, DataType]()) { (map, pair) =>
-      val (k, v) = pair
-      try {
-        map + (k -> getDataType(v))
-      } catch {
-        case e: DataTypeException => map
-      }
-    }
-    combineDataTypeMap(aggr, dataTypes)
-  }
-
-  def combineDataTypeMap(aggr1: Map[String, DataType], aggr2: Map[String, DataType]): Map[String, DataType] = {
-    aggr2.foldLeft(aggr1) { (a, c) =>
-      a.get(c._1) match {
-        case Some(t) => {
-          try {
-            a + (c._1 -> genericTypeOf(t, c._2))
-          } catch {
-            case e: DataTypeException => a
-          }
-        }
-        case _ => a + c
-      }
-    }
-  }
-
-}
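
The removed genericTypeOf above widens two Spark SQL DataTypes to a common type: StringType absorbs
everything, DoubleType absorbs the narrower numeric types, and so on, with incompatible pairs raising
DataTypeException. A rough sketch of the same widening idea, restricted to the string/numeric cases
(the function name is illustrative, and falling back to StringType instead of throwing is an assumption
made only for this example):

    import org.apache.spark.sql.types._

    object TypeWidenSketch {
      // Numeric types ranked from narrowest to widest, matching the order the removed code
      // encodes case by case.
      private val numericRank = Seq[DataType](ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType)

      def widen(dt1: DataType, dt2: DataType): DataType = (dt1, dt2) match {
        case (a, b) if a == b                  => a
        case (NullType, b)                     => b
        case (a, NullType)                     => a
        case (StringType, _) | (_, StringType) => StringType
        case (a, b) if numericRank.contains(a) && numericRank.contains(b) =>
          if (numericRank.indexOf(a) >= numericRank.indexOf(b)) a else b
        case _                                 => StringType // assumption: fall back instead of throwing
      }
    }

For instance, TypeWidenSketch.widen(IntegerType, DoubleType) yields DoubleType, the same answer the
removed case table gives.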

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/ExprValueUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/ExprValueUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/ExprValueUtil.scala
deleted file mode 100644
index 940d0cb..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/ExprValueUtil.scala
+++ /dev/null
@@ -1,263 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.apache.griffin.measure.rule.expr._
-import org.apache.griffin.measure.rule.func._
-import org.apache.spark.sql.Row
-
-import scala.util.{Success, Try}
-
-object ExprValueUtil {
-
-  private def append(path: List[String], step: String): List[String] = {
-    path :+ step
-  }
-
-  private def value2Map(key: String, value: Option[Any]): Map[String, Any] = {
-    value.flatMap(v => Some((key -> v))).toMap
-  }
-
-  private def getSingleValue(data: Option[Any], desc: FieldDescOnly): Option[Any] = {
-    data match {
-      case Some(row: Row) => {
-        desc match {
-          case i: IndexDesc => try { Some(row.getAs[Any](i.index)) } catch { case _ => None }
-          case f: FieldDesc => try { Some(row.getAs[Any](f.field)) } catch { case _ => None }
-          case _ => None
-        }
-      }
-      case Some(d: Map[String, Any]) => {
-        desc match {
-          case f: FieldDesc => d.get(f.field)
-          case _ => None
-        }
-      }
-      case Some(d: Seq[Any]) => {
-        desc match {
-          case i: IndexDesc => if (i.index >= 0 && i.index < d.size) Some(d(i.index)) else None
-          case _ => None
-        }
-      }
-    }
-  }
-
-  private def calcExprValues(pathDatas: List[(List[String], Option[Any])], expr: Expr, existExprValueMap: Map[String, Any]): List[(List[String], Option[Any])] = {
-    Try {
-      expr match {
-        case selection: SelectionExpr => {
-          selection.selectors.foldLeft(pathDatas) { (pds, selector) =>
-            calcExprValues(pds, selector, existExprValueMap)
-          }
-        }
-        case selector: IndexFieldRangeSelectExpr => {
-          pathDatas.flatMap { pathData =>
-            val (path, data) = pathData
-            data match {
-              case Some(row: Row) => {
-                selector.fields.flatMap { field =>
-                  field match {
-                    case (_: IndexDesc) | (_: FieldDesc) => {
-                      getSingleValue(data, field).map { v => (append(path, field.desc), Some(v)) }
-                    }
-                    case a: AllFieldsDesc => {
-                      (0 until row.size).flatMap { i =>
-                        getSingleValue(data, IndexDesc(i.toString)).map { v =>
-                          (append(path, s"${a.desc}_${i}"), Some(v))
-                        }
-                      }.toList
-                    }
-                    case r: FieldRangeDesc => {
-                      (r.startField, r.endField) match {
-                        case (si: IndexDesc, ei: IndexDesc) => {
-                          (si.index to ei.index).flatMap { i =>
-                            (append(path, s"${r.desc}_${i}"), getSingleValue(data, IndexDesc(i.toString)))
-                            getSingleValue(data, IndexDesc(i.toString)).map { v =>
-                              (append(path, s"${r.desc}_${i}"), Some(v))
-                            }
-                          }.toList
-                        }
-                        case _ => Nil
-                      }
-                    }
-                    case _ => Nil
-                  }
-                }
-              }
-              case Some(d: Map[String, Any]) => {
-                selector.fields.flatMap { field =>
-                  field match {
-                    case (_: IndexDesc) | (_: FieldDesc) => {
-                      getSingleValue(data, field).map { v => (append(path, field.desc), Some(v)) }
-                    }
-                    case a: AllFieldsDesc => {
-                      d.keySet.flatMap { k =>
-                        getSingleValue(data, FieldDesc(k)).map { v =>
-                          (append(path, s"${a.desc}_${k}"), Some(v))
-                        }
-                      }
-                    }
-                    case _ => None
-                  }
-                }
-              }
-              case Some(d: Seq[Any]) => {
-                selector.fields.flatMap { field =>
-                  field match {
-                    case (_: IndexDesc) | (_: FieldDesc) => {
-                      getSingleValue(data, field).map { v => (append(path, field.desc), Some(v)) }
-                    }
-                    case a: AllFieldsDesc => {
-                      (0 until d.size).flatMap { i =>
-                        (append(path, s"${a.desc}_${i}"), getSingleValue(data, IndexDesc(i.toString)))
-                        getSingleValue(data, IndexDesc(i.toString)).map { v =>
-                          (append(path, s"${a.desc}_${i}"), Some(v))
-                        }
-                      }.toList
-                    }
-                    case r: FieldRangeDesc => {
-                      (r.startField, r.endField) match {
-                        case (si: IndexDesc, ei: IndexDesc) => {
-                          (si.index to ei.index).flatMap { i =>
-                            (append(path, s"${r.desc}_${i}"), getSingleValue(data, IndexDesc(i.toString)))
-                            getSingleValue(data, IndexDesc(i.toString)).map { v =>
-                              (append(path, s"${r.desc}_${i}"), Some(v))
-                            }
-                          }.toList
-                        }
-                        case _ => None
-                      }
-                    }
-                    case _ => None
-                  }
-                }
-              }
-            }
-          }
-        }
-        case selector: FunctionOperationExpr => {
-          val args: Array[Option[Any]] = selector.args.map { arg =>
-            arg.calculate(existExprValueMap)
-          }.toArray
-          pathDatas.flatMap { pathData =>
-            val (path, data) = pathData
-            data match {
-              case Some(d: String) => {
-                val res = FunctionUtil.invoke(selector.func, Some(d) +: args)
-                val residx = res.zipWithIndex
-                residx.map { vi =>
-                  val (v, i) = vi
-                  val step = if (i == 0) s"${selector.desc}" else s"${selector.desc}_${i}"
-                  (append(path, step), v)
-                }
-              }
-              case _ => None
-            }
-          }
-        }
-        case selector: FilterSelectExpr => {  // filter means selecting the items that fit the condition
-          pathDatas.flatMap { pathData =>
-            val (path, data) = pathData
-            data match {
-              case Some(row: Row) => {
-              // the right-hand value cannot be a selection
-                val rmap = value2Map(selector.value._id, selector.value.calculate(existExprValueMap))
-                (0 until row.size).flatMap { i =>
-                  val dt = getSingleValue(data, IndexDesc(i.toString))
-                  val lmap = value2Map(selector.fieldKey, getSingleValue(dt, selector.field))
-                  val partValueMap = lmap ++ rmap
-                  selector.calculate(partValueMap) match {
-                    case Some(true) => Some((append(path, s"${selector.desc}_${i}"), dt))
-                    case _ => None
-                  }
-                }
-              }
-              case Some(d: Map[String, Any]) => {
-                val rmap = value2Map(selector.value._id, selector.value.calculate(existExprValueMap))
-                d.keySet.flatMap { k =>
-                  val dt = getSingleValue(data, FieldDesc(k))
-                  val lmap = value2Map(selector.fieldKey, getSingleValue(dt, selector.field))
-                  val partValueMap = lmap ++ rmap
-                  selector.calculate(partValueMap) match {
-                    case Some(true) => Some((append(path, s"${selector.desc}_${k}"), dt))
-                    case _ => None
-                  }
-                }
-              }
-              case Some(d: Seq[Any]) => {
-                val rmap = value2Map(selector.value._id, selector.value.calculate(existExprValueMap))
-                (0 until d.size).flatMap { i =>
-                  val dt = getSingleValue(data, IndexDesc(i.toString))
-                  val lmap = value2Map(selector.fieldKey, getSingleValue(dt, selector.field))
-                  val partValueMap = lmap ++ rmap
-                  selector.calculate(partValueMap) match {
-                    case Some(true) => Some((append(path, s"${selector.desc}_${i}"), dt))
-                    case _ => None
-                  }
-                }
-              }
-            }
-          }
-        }
-        case _ => {
-          (expr.desc :: Nil, expr.calculate(existExprValueMap)) :: Nil
-        }
-      }
-    } match {
-      case Success(v) => v
-      case _ => Nil
-    }
-  }
-
-  private def calcExprsValues(data: Option[Any], exprs: Iterable[Expr], existExprValueMap: Map[String, Any]): List[Map[String, Any]] = {
-    val selectionValues: Map[String, List[(List[String], Any)]] = exprs.map { expr =>
-      (expr._id, calcExprValues((Nil, data) :: Nil, expr, existExprValueMap).flatMap { pair =>
-        pair._2 match {
-          case Some(v) => Some((pair._1, v))
-          case _ => None
-        }
-      })
-    }.toMap
-    // if exprs is empty, return an empty value map for each row
-    if (selectionValues.isEmpty) List(Map[String, Any]())
-    else SchemaValueCombineUtil.cartesian(selectionValues)
-  }
-
-  // try to calculate some exprs from the data and initExprValueMap, generating a new expression value map
-  // depends on the original data and the existing expr value map
-  def genExprValueMaps(data: Option[Any], exprs: Iterable[Expr], initExprValueMap: Map[String, Any]): List[Map[String, Any]] = {
-    val (selections, nonSelections) = exprs.partition(_.isInstanceOf[SelectionExpr])
-    val valueMaps = calcExprsValues(data, selections, initExprValueMap)
-    updateExprValueMaps(nonSelections, valueMaps)
-  }
-
-  // with the exprValueMap, calculate the expressions and update the expression value map
-  // depends only on the existing expr value map; pure calculation, the original data is not needed
-  def updateExprValueMaps(exprs: Iterable[Expr], exprValueMaps: List[Map[String, Any]]): List[Map[String, Any]] = {
-    exprValueMaps.map { valueMap =>
-      exprs.foldLeft(valueMap) { (em, expr) =>
-        expr.calculate(em) match {
-          case Some(v) => em + (expr._id -> v)
-          case _ => em
-        }
-      }
-    }
-  }
-
-}
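
The removed getSingleValue walks one step into a Row, Map or Seq according to a field or index
descriptor, and calcExprValues chains such steps along a selection path, dropping a branch as soon as
a step is missing. A stripped-down sketch of that path walk over plain Scala collections (no Spark
Row; the IndexDesc/FieldDesc descriptors are replaced here by a simple Either, so all names are
illustrative):

    object PathWalkSketch {
      // Follow a path of indexes (Left) and field names (Right) into nested Seqs and Maps,
      // returning None as soon as any step cannot be resolved.
      def walk(data: Any, path: List[Either[Int, String]]): Option[Any] = path match {
        case Nil => Some(data)
        case step :: rest =>
          val next: Option[Any] = (data, step) match {
            case (m: Map[_, _], Right(field))                 => m.asInstanceOf[Map[String, Any]].get(field)
            case (s: Seq[_], Left(i)) if i >= 0 && i < s.size => Some(s(i))
            case _                                            => None
          }
          next.flatMap(walk(_, rest))
      }
    }

    // PathWalkSketch.walk(Map("user" -> Map("mails" -> Seq("a@x", "b@x"))),
    //                     List(Right("user"), Right("mails"), Left(1)))  == Some("b@x")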

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/RuleAnalyzer.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/RuleAnalyzer.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/RuleAnalyzer.scala
deleted file mode 100644
index 5ec143f..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/RuleAnalyzer.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.apache.griffin.measure.rule.expr._
-
-case class RuleAnalyzer(rule: StatementExpr) extends Serializable {
-
-  val constData = ""
-  private val SourceData = "source"
-  private val TargetData = "target"
-
-  val constCacheExprs: Iterable[Expr] = rule.getCacheExprs(constData)
-  private val sourceCacheExprs: Iterable[Expr] = rule.getCacheExprs(SourceData)
-  private val targetCacheExprs: Iterable[Expr] = rule.getCacheExprs(TargetData)
-
-  private val sourcePersistExprs: Iterable[Expr] = rule.getPersistExprs(SourceData)
-  private val targetPersistExprs: Iterable[Expr] = rule.getPersistExprs(TargetData)
-
-  val constFinalCacheExprs: Iterable[Expr] = rule.getFinalCacheExprs(constData).toSet
-  private val sourceFinalCacheExprs: Iterable[Expr] = rule.getFinalCacheExprs(SourceData).toSet ++ sourcePersistExprs.toSet
-  private val targetFinalCacheExprs: Iterable[Expr] = rule.getFinalCacheExprs(TargetData).toSet ++ targetPersistExprs.toSet
-
-  private val groupbyExprPairs: Seq[(Expr, Expr)] = rule.getGroupbyExprPairs((SourceData, TargetData))
-  private val sourceGroupbyExprs: Seq[Expr] = groupbyExprPairs.map(_._1)
-  private val targetGroupbyExprs: Seq[Expr] = groupbyExprPairs.map(_._2)
-
-  val sourceRuleExprs: RuleExprs = RuleExprs(sourceGroupbyExprs, sourceCacheExprs,
-    sourceFinalCacheExprs, sourcePersistExprs)
-  val targetRuleExprs: RuleExprs = RuleExprs(targetGroupbyExprs, targetCacheExprs,
-    targetFinalCacheExprs, targetPersistExprs)
-
-}
-
-
-// for a single data source
-// groupbyExprs: in the accuracy case, these exprs can serve as group-by exprs
-//                  Data keys for the accuracy case, generated from the equality statements, to improve calculation efficiency
-// cacheExprs: exprs whose values can be calculated independently and cached for later use
-//                  Cached for the finalCacheExprs calculation; it contains some redundant values, so persisting all of it would be wasteful
-// finalCacheExprs: the roots of cacheExprs, cached for later use, together with persistExprs
-//                  Cached for calculation, and can be saved for re-calculation in streaming mode
-// persistExprs: the expr values that should be persisted; only direct selection exprs are persistable
-//                  Persisted for record usage, to record the missing data; needs to be readable as raw data
-case class RuleExprs(groupbyExprs: Seq[Expr],
-                     cacheExprs: Iterable[Expr],
-                     finalCacheExprs: Iterable[Expr],
-                     persistExprs: Iterable[Expr]
-                    ) extends Serializable {
-  // for example: for a rule "$source.name = $target.name AND $source.age < $target.age + (3 * 4)"
-  // in this rule, for the target data source, the targetRuleExprs looks like below
-  // groupbyExprs: $target.name
-  // cacheExprs: $target.name, $target.age, $target.age + (3 * 4)
-  // finalCacheExprs: $target.name, $target.age + (3 * 4), $target.age
-  // persistExprs: $target.name, $target.age
-}
\ No newline at end of file
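
The comment block above notes that the group-by exprs are generated from the equality statements of
the rule. A simplified illustration of how such pairs could be picked out of a rule's conjuncts; the
Cmp class and groupbyPairs function are illustrative stand-ins, not the removed StatementExpr/Expr
implementation:

    object GroupbySketch {
      // A parsed comparison, reduced to plain strings for this sketch.
      case class Cmp(left: String, op: String, right: String)

      // Only an equality between a source-side and a target-side expression yields a data-key pair.
      def groupbyPairs(conjuncts: Seq[Cmp], source: String, target: String): Seq[(String, String)] =
        conjuncts.collect {
          case Cmp(l, "=", r) if l.startsWith(source) && r.startsWith(target) => (l, r)
        }
    }

    // For the example rule above:
    // GroupbySketch.groupbyPairs(
    //   Seq(GroupbySketch.Cmp("$source.name", "=", "$target.name"),
    //       GroupbySketch.Cmp("$source.age", "<", "$target.age + (3 * 4)")),
    //   "$source", "$target")  ==  Seq(("$source.name", "$target.name"))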

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/RuleFactory.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/RuleFactory.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/RuleFactory.scala
deleted file mode 100644
index bbaf5cb..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/RuleFactory.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.apache.griffin.measure.config.params.user._
-
-import scala.util.Failure
-//import org.apache.griffin.measure.rule.expr_old._
-import org.apache.griffin.measure.rule.expr._
-
-import scala.util.{Success, Try}
-
-
-case class RuleFactory(evaluateRuleParam: EvaluateRuleParam) {
-
-  val ruleParser: RuleParser = RuleParser()
-
-  def generateRule(): StatementExpr = {
-    val rules = evaluateRuleParam.rules
-    val statement = parseExpr(rules) match {
-      case Success(se) => se
-      case Failure(ex) => throw ex
-    }
-    statement
-  }
-
-  private def parseExpr(rules: String): Try[StatementExpr] = {
-    Try {
-      val result = ruleParser.parseAll(ruleParser.rule, rules)
-      if (result.successful) result.get
-      else throw new Exception("parse rule error!")
-//      throw new Exception("parse rule error!")
-    }
-  }
-
-}
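
The removed parseExpr wraps the parser-combinator call in a Try and turns an unsuccessful ParseResult
into a thrown exception, so generateRule can simply rethrow on Failure. A self-contained sketch of
that pattern with a deliberately trivial grammar (the grammar and all names are illustrative):

    import scala.util.parsing.combinator.JavaTokenParsers
    import scala.util.Try

    object TinyParser extends JavaTokenParsers {
      def num: Parser[Int] = wholeNumber ^^ (_.toInt)

      // Same shape as the removed parseExpr: wrap parseAll in Try and surface a parse
      // failure as an exception carrying the parser's error message.
      def parse(input: String): Try[Int] = Try {
        val result = parseAll(num, input)
        if (result.successful) result.get
        else throw new Exception(s"parse rule error: ${result.toString}")
      }
    }

    // TinyParser.parse("42")   == Success(42)
    // TinyParser.parse("4x2")  is a Failure carrying the parse error message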

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/RuleParser.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/RuleParser.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/RuleParser.scala
deleted file mode 100644
index 55d9f45..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/RuleParser.scala
+++ /dev/null
@@ -1,244 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.apache.griffin.measure.rule.expr._
-
-import scala.util.parsing.combinator._
-
-case class RuleParser() extends JavaTokenParsers with Serializable {
-
-  /**
-    * BNF representation for grammar as below:
-    *
-    * <rule> ::= <logical-statement> [WHEN <logical-statement>]
-    * rule: mapping-rule [WHEN when-rule]
-    * - mapping-rule: the first-level operator should preferably not be OR or NOT, otherwise the groupby column cannot be found automatically
-    * - when-rule: should only contain general information about the data source, not information specific to each data row
-    *
-    * <logical-statement> ::= [NOT] <logical-expression> [(AND | OR) <logical-expression>]+ | "(" <logical-statement> ")"
-    * logical-statement: return boolean value
-    * logical-operator: "AND" | "&&", "OR" | "||", "NOT" | "!"
-    *
-    * <logical-expression> ::= <math-expr> (<compare-opr> <math-expr> | <range-opr> <range-expr>)
-    * logical-expression example: $source.id = $target.id, $source.page_id IN ('3214', '4312', '60821')
-    *
-    * <compare-opr> ::= "=" | "!=" | "<" | ">" | "<=" | ">="
-    * <range-opr> ::= ["NOT"] "IN" | "BETWEEN"
-    * <range-expr> ::= "(" [<math-expr>] [, <math-expr>]+ ")"
-    * range-expr example: ('3214', '4312', '60821'), (10, 15), ()
-    *
-    * <math-expr> ::= [<unary-opr>] <math-factor> [<binary-opr> <math-factor>]+
-    * math-expr example: $source.price * $target.count, "hello" + " " + "world" + 123
-    *
-    * <binary-opr> ::= "+" | "-" | "*" | "/" | "%"
-    * <unary-opr> ::= "+" | "-"
-    *
-    * <math-factor> ::= <literal> | <selection> | "(" <math-expr> ")"
-    *
-    * <selection> ::= <selection-head> [ <field-sel> | <function-operation> | <index-field-range-sel> | <filter-sel> ]+
-    * selection example: $source.price, $source.json(), $source['state'], $source.numList[3], $target.json().mails['org' = 'apache'].names[*]
-    *
-    * <selection-head> ::= $source | $target
-    *
-    * <field-sel> ::= "." <field-string>
-    *
-    * <function-operation> ::= "." <function-name> "(" <arg> [, <arg>]+ ")"
-    * <function-name> ::= <name-string>
-    * <arg> ::= <math-expr>
-    *
-    * <index-field-range-sel> ::= "[" <index-field-range> [, <index-field-range>]+ "]"
-    * <index-field-range> ::= <index-field> | (<index-field>, <index-field>) | "*"
-    * index-field-range: 2 means the 3rd item, (0, 3) means first 4 items, * means all items, 'age' means item 'age'
-    * <index-field> ::= <index> | <field-quote> | <all-selection>
-    * index: 0 ~ n means position from start, -1 ~ -n means position from end
-    * <field-quote> ::= ' <field-string> ' | " <field-string> "
-    *
-    * <filter-sel> ::= "[" <field-quote> <filter-compare-opr> <math-expr> "]"
-    * <filter-compare-opr> ::= "=" | "!=" | "<" | ">" | "<=" | ">="
-    * filter-sel example: ['name' = 'URL'], $source.man['age' > $source.graduate_age + 5 ]
-    *
-    * When a <math-expr> appears inside a selection, it must not reference a different <selection-head>, for example:
-    * $source.tags[1+2]             valid
-    * $source.tags[$source.first]   valid
-    * $source.tags[$target.first]   invalid
-    * -- Such checking is the job of validation, not of the parser
-    *
-    *
-    * <literal> ::= <literal-string> | <literal-number> | <literal-time> | <literal-boolean> | <literal-null> | <literal-none>
-    * <literal-string> ::= <any-string>
-    * <literal-number> ::= <integer> | <double>
-    * <literal-time> ::= <integer> ("d"|"h"|"m"|"s"|"ms")
-    * <literal-boolean> ::= true | false
-    * <literal-null> ::= null | undefined
-    * <literal-none> ::= none
-    *
-    */
-
-  object Keyword {
-    def WhenKeywords: Parser[String] = """(?i)when""".r
-    def UnaryLogicalKeywords: Parser[String] = """(?i)not""".r
-    def BinaryLogicalKeywords: Parser[String] = """(?i)and|or""".r
-    def RangeKeywords: Parser[String] = """(?i)(not\s+)?(in|between)""".r
-    def DataSourceKeywords: Parser[String] = """(?i)\$(source|target)""".r
-    def Keywords: Parser[String] = WhenKeywords | UnaryLogicalKeywords | BinaryLogicalKeywords | RangeKeywords | DataSourceKeywords
-  }
-  import Keyword._
-
-  object Operator {
-    def NotLogicalOpr: Parser[String] = """(?i)not""".r | "!"
-    def AndLogicalOpr: Parser[String] = """(?i)and""".r | "&&"
-    def OrLogicalOpr: Parser[String] = """(?i)or""".r | "||"
-    def CompareOpr: Parser[String] = """!?==?""".r | """<=?""".r | """>=?""".r
-    def RangeOpr: Parser[String] = RangeKeywords
-
-    def UnaryMathOpr: Parser[String] = "+" | "-"
-    def BinaryMathOpr1: Parser[String] = "*" | "/" | "%"
-    def BinaryMathOpr2: Parser[String] = "+" | "-"
-
-    def FilterCompareOpr: Parser[String] = """!?==?""".r | """<=?""".r | """>=?""".r
-
-    def SqBracketPair: (Parser[String], Parser[String]) = ("[", "]")
-    def BracketPair: (Parser[String], Parser[String]) = ("(", ")")
-    def Dot: Parser[String] = "."
-    def AllSelection: Parser[String] = "*"
-    def SQuote: Parser[String] = "'"
-    def DQuote: Parser[String] = "\""
-    def Comma: Parser[String] = ","
-  }
-  import Operator._
-
-  object SomeString {
-//    def AnyString: Parser[String] = """[^'\"{}\[\]()=<>.$@,;+\-*/\\]*""".r
-    def AnyString: Parser[String] = """[^'\"]*""".r
-    def SimpleFieldString: Parser[String] = """\w+""".r
-    def FieldString: Parser[String] = """[\w\s]+""".r
-    def NameString: Parser[String] = """[a-zA-Z_]\w*""".r
-  }
-  import SomeString._
-
-  object SomeNumber {
-    def IntegerNumber: Parser[String] = """[+\-]?\d+""".r
-    def DoubleNumber: Parser[String] = """[+\-]?(\.\d+|\d+\.\d*)""".r
-    def IndexNumber: Parser[String] = IntegerNumber
-  }
-  import SomeNumber._
-
-  // -- literal --
-  def literal: Parser[LiteralExpr] = literialString | literialTime | literialNumber | literialBoolean | literialNull | literialNone
-  def literialString: Parser[LiteralStringExpr] = (SQuote ~> AnyString <~ SQuote | DQuote ~> AnyString <~ DQuote) ^^ { LiteralStringExpr(_) }
-  def literialNumber: Parser[LiteralNumberExpr] = (DoubleNumber | IntegerNumber) ^^ { LiteralNumberExpr(_) }
-  def literialTime: Parser[LiteralTimeExpr] = """(\d+(d|h|m|s|ms))+""".r ^^ { LiteralTimeExpr(_) }
-  def literialBoolean: Parser[LiteralBooleanExpr] = ("""(?i)true""".r | """(?i)false""".r) ^^ { LiteralBooleanExpr(_) }
-  def literialNull: Parser[LiteralNullExpr] = ("""(?i)null""".r | """(?i)undefined""".r) ^^ { LiteralNullExpr(_) }
-  def literialNone: Parser[LiteralNoneExpr] = """(?i)none""".r ^^ { LiteralNoneExpr(_) }
-
-  // -- selection --
-  // <selection> ::= <selection-head> [ <field-sel> | <function-operation> | <index-field-range-sel> | <filter-sel> ]+
-  def selection: Parser[SelectionExpr] = selectionHead ~ rep(selector) ^^ {
-    case head ~ selectors => SelectionExpr(head, selectors)
-  }
-  def selector: Parser[SelectExpr] = (functionOperation | fieldSelect | indexFieldRangeSelect | filterSelect)
-
-  def selectionHead: Parser[SelectionHead] = DataSourceKeywords ^^ { SelectionHead(_) }
-  // <field-sel> ::= "." <field-string>
-  def fieldSelect: Parser[IndexFieldRangeSelectExpr] = Dot ~> SimpleFieldString ^^ {
-    case field => IndexFieldRangeSelectExpr(FieldDesc(field) :: Nil)
-  }
-  // <function-operation> ::= "." <function-name> "(" <arg> [, <arg>]+ ")"
-  def functionOperation: Parser[FunctionOperationExpr] = Dot ~ NameString ~ BracketPair._1 ~ repsep(argument, Comma) ~ BracketPair._2 ^^ {
-    case _ ~ func ~ _ ~ args ~ _ => FunctionOperationExpr(func, args)
-  }
-  def argument: Parser[MathExpr] = mathExpr
-  // <index-field-range-sel> ::= "[" <index-field-range> [, <index-field-range>]+ "]"
-  def indexFieldRangeSelect: Parser[IndexFieldRangeSelectExpr] = SqBracketPair._1 ~> rep1sep(indexFieldRange, Comma) <~ SqBracketPair._2 ^^ {
-    case ifrs => IndexFieldRangeSelectExpr(ifrs)
-  }
-  // <index-field-range> ::= <index-field> | (<index-field>, <index-field>) | "*"
-  def indexFieldRange: Parser[FieldDescOnly] = indexField | BracketPair._1 ~ indexField ~ Comma ~ indexField ~ BracketPair._2 ^^ {
-    case _ ~ if1 ~ _ ~ if2 ~ _ => FieldRangeDesc(if1, if2)
-  }
-  // <index-field> ::= <index> | <field-quote> | <all-selection>
-  // *this could also parse a <math-expr>, but to keep things simple it is not supported for now*
-  def indexField: Parser[FieldDescOnly] = IndexNumber ^^ { IndexDesc(_) } | fieldQuote | AllSelection ^^ { AllFieldsDesc(_) }
-  // <field-quote> ::= ' <field-string> ' | " <field-string> "
-  def fieldQuote: Parser[FieldDesc] = (SQuote ~> FieldString <~ SQuote | DQuote ~> FieldString <~ DQuote) ^^ { FieldDesc(_) }
-  // <filter-sel> ::= "[" <field-quote> <filter-compare-opr> <math-expr> "]"
-  def filterSelect: Parser[FilterSelectExpr] = SqBracketPair._1 ~> fieldQuote ~ FilterCompareOpr ~ mathExpr <~ SqBracketPair._2 ^^ {
-    case field ~ compare ~ value => FilterSelectExpr(field, compare, value)
-  }
-
-  // -- math --
-  // <math-factor> ::= <literal> | <selection> | "(" <math-expr> ")"
-  def mathFactor: Parser[MathExpr] = (literal | selection | BracketPair._1 ~> mathExpr <~ BracketPair._2) ^^ { MathFactorExpr(_) }
-  // <math-expr> ::= [<unary-opr>] <math-factor> [<binary-opr> <math-factor>]+
-  // <unary-opr> ::= "+" | "-"
-  def unaryMathExpr: Parser[MathExpr] = rep(UnaryMathOpr) ~ mathFactor ^^ {
-    case Nil ~ a => a
-    case list ~ a => UnaryMathExpr(list, a)
-  }
-  // <binary-opr> ::= "+" | "-" | "*" | "/" | "%"
-  def binaryMathExpr1: Parser[MathExpr] = unaryMathExpr ~ rep(BinaryMathOpr1 ~ unaryMathExpr) ^^ {
-    case a ~ Nil => a
-    case a ~ list => BinaryMathExpr(a, list.map(c => (c._1, c._2)))
-  }
-  def binaryMathExpr2: Parser[MathExpr] = binaryMathExpr1 ~ rep(BinaryMathOpr2 ~ binaryMathExpr1) ^^ {
-    case a ~ Nil => a
-    case a ~ list => BinaryMathExpr(a, list.map(c => (c._1, c._2)))
-  }
-  def mathExpr: Parser[MathExpr] = binaryMathExpr2
-
-  // -- logical expression --
-  // <range-expr> ::= "(" [<math-expr>] [, <math-expr>]+ ")"
-  def rangeExpr: Parser[RangeDesc] = BracketPair._1 ~> repsep(mathExpr, Comma) <~ BracketPair._2 ^^ { RangeDesc(_) }
-  // <logical-expression> ::= <math-expr> (<compare-opr> <math-expr> | <range-opr> <range-expr>)
-  def logicalExpr: Parser[LogicalExpr] = mathExpr ~ CompareOpr ~ mathExpr ^^ {
-    case left ~ opr ~ right => LogicalCompareExpr(left, opr, right)
-  } | mathExpr ~ RangeOpr ~ rangeExpr ^^ {
-    case left ~ opr ~ range => LogicalRangeExpr(left, opr, range)
-  } | mathExpr ^^ { LogicalSimpleExpr(_) }
-
-  // -- logical statement --
-  def logicalFactor: Parser[LogicalExpr] = logicalExpr | BracketPair._1 ~> logicalStatement <~ BracketPair._2
-  def notLogicalStatement: Parser[LogicalExpr] = rep(NotLogicalOpr) ~ logicalFactor ^^ {
-    case Nil ~ a => a
-    case list ~ a => UnaryLogicalExpr(list, a)
-  }
-  def andLogicalStatement: Parser[LogicalExpr] = notLogicalStatement ~ rep(AndLogicalOpr ~ notLogicalStatement) ^^ {
-    case a ~ Nil => a
-    case a ~ list => BinaryLogicalExpr(a, list.map(c => (c._1, c._2)))
-  }
-  def orLogicalStatement: Parser[LogicalExpr] = andLogicalStatement ~ rep(OrLogicalOpr ~ andLogicalStatement) ^^ {
-    case a ~ Nil => a
-    case a ~ list => BinaryLogicalExpr(a, list.map(c => (c._1, c._2)))
-  }
-  // <logical-statement> ::= [NOT] <logical-expression> [(AND | OR) <logical-expression>]+ | "(" <logical-statement> ")"
-  def logicalStatement: Parser[LogicalExpr] = orLogicalStatement
-
-  // -- clause statement --
-  def whereClause: Parser[WhereClauseExpr] = logicalStatement ^^ { WhereClauseExpr(_) }
-  def whenClause: Parser[WhenClauseExpr] = WhenKeywords ~> logicalStatement ^^ { WhenClauseExpr(_) }
-
-  // -- rule --
-  // <rule> ::= <logical-statement> [WHEN <logical-statement>]
-  def rule: Parser[StatementExpr] = whereClause ~ opt(whenClause) ^^ {
-    case a ~ b => StatementExpr(a, b)
-  }
-
-}
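
Each BNF production in the comment above maps to one Parser[...] definition, combined with ~, |, rep
and ^^, and operator precedence is expressed by layering the productions (binaryMathExpr1 under
binaryMathExpr2, and so on). A minimal sketch of the same combinator style for just the <math-expr>
subset, using illustrative AST classes rather than the removed expr types:

    import scala.util.parsing.combinator.JavaTokenParsers

    sealed trait Ast
    case class Num(v: Double) extends Ast
    case class BinOp(op: String, l: Ast, r: Ast) extends Ast

    object MiniMathParser extends JavaTokenParsers {
      // <math-factor> ::= <number> | "(" <math-expr> ")"
      def factor: Parser[Ast] = floatingPointNumber ^^ (s => Num(s.toDouble)) | "(" ~> expr <~ ")"
      // "*" binds tighter than "+", so it sits one level below in the grammar
      def term: Parser[Ast] = factor ~ rep("*" ~ factor) ^^ {
        case a ~ list => list.foldLeft(a) { case (acc, op ~ b) => BinOp(op, acc, b) }
      }
      def expr: Parser[Ast] = term ~ rep("+" ~ term) ^^ {
        case a ~ list => list.foldLeft(a) { case (acc, op ~ b) => BinOp(op, acc, b) }
      }
      def parse(s: String): ParseResult[Ast] = parseAll(expr, s)
    }

    // MiniMathParser.parse("1 + 2 * (3 + 4)").get
    //   == BinOp("+", Num(1.0), BinOp("*", Num(2.0), BinOp("+", Num(3.0), Num(4.0))))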

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/SchemaValueCombineUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/SchemaValueCombineUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/SchemaValueCombineUtil.scala
deleted file mode 100644
index ed3b3fc..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/SchemaValueCombineUtil.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-object SchemaValueCombineUtil {
-
-  // Map[String, List[(List[String], T)]]: Map[key, List[(path, value)]]
-  def cartesian[T](valuesMap: Map[String, List[(List[String], T)]]): List[Map[String, T]] = {
-    val fieldsList: List[(String, List[(List[String], T)])] = valuesMap.toList
-
-    // List[(key, List[(path, value)])] to List[(path, (key, value))]
-    val valueList: List[(List[String], (String, T))] = fieldsList.flatMap { fields =>
-      val (key, list) = fields
-      list.map { pv =>
-        val (path, value) = pv
-        (path, (key, value))
-      }
-    }
-
-    // 1. generate tree from value list, and return root node
-    val root = TreeUtil.genRootTree(valueList)
-
-    // 2. depth-first visit of the tree from the root, merging data into the value map list
-    val valueMapList: List[Map[String, _]] = TreeUtil.mergeDatasIntoMap(root, Nil)
-
-    // 3. cast each value back to T
-    val result = valueMapList.map { mp =>
-      mp.map { kv =>
-        val (k, v) = kv
-        (k, v.asInstanceOf[T])
-      }
-    }
-
-    result
-
-  }
-
-
-  case class TreeNode(key: String, var datas: List[(String, _)]) {
-    var children = List[TreeNode]()
-    def addChild(node: TreeNode): Unit = children = children :+ node
-    def mergeSelf(node: TreeNode): Unit = datas = datas ::: node.datas
-  }
-
-  object TreeUtil {
-    private def genTree(path: List[String], datas: List[(String, _)]): Option[TreeNode] = {
-      path match {
-        case Nil => None
-        case head :: tail => {
-          genTree(tail, datas) match {
-            case Some(child) => {
-              val curNode = TreeNode(head, Nil)
-              curNode.addChild(child)
-              Some(curNode)
-            }
-            case _ => Some(TreeNode(head, datas))
-          }
-        }
-      }
-    }
-
-    private def mergeTrees(trees: List[TreeNode], newTreeOpt: Option[TreeNode]): List[TreeNode] = {
-      newTreeOpt match {
-        case Some(newTree) => {
-          trees.find(tree => tree.key == newTree.key) match {
-            case Some(tree) => {
-              // children merge
-              for (child <- newTree.children) {
-                tree.children = mergeTrees(tree.children, Some(child))
-              }
-              // self data merge
-              tree.mergeSelf(newTree)
-              trees
-            }
-            case _ => trees :+ newTree
-          }
-        }
-        case _ => trees
-      }
-    }
-
-    private def root(): TreeNode = TreeNode("", Nil)
-
-    def genRootTree(values: List[(List[String], (String, _))]): TreeNode = {
-      val rootNode = root()
-      val nodeOpts = values.map(value => genTree(value._1, value._2 :: Nil))
-      rootNode.children = nodeOpts.foldLeft(List[TreeNode]()) { (trees, treeOpt) =>
-        mergeTrees(trees, treeOpt)
-      }
-      rootNode
-    }
-
-    private def add(mapList1: List[Map[String, _]], mapList2: List[Map[String, _]]):  List[Map[String, _]] = {
-      mapList1 ::: mapList2
-    }
-    private def multiply(mapList1: List[Map[String, _]], mapList2: List[Map[String, _]]):  List[Map[String, _]] = {
-      mapList1.flatMap { map1 =>
-        mapList2.map { map2 =>
-          map1 ++ map2
-        }
-      }
-    }
-
-    private def keysList(mapList: List[Map[String, _]]): List[String] = {
-      val keySet = mapList match {
-        case Nil => Set[String]()
-        case head :: _ => head.keySet
-      }
-      keySet.toList
-    }
-
-    def mergeDatasIntoMap(root: TreeNode, mapDatas: List[Map[String, _]]): List[Map[String, _]] = {
-      val childrenKeysMapDatas = root.children.foldLeft(Map[List[String], List[Map[String, _]]]()) { (keysMap, child) =>
-        val childMdts = mergeDatasIntoMap(child, List[Map[String, _]]())
-        childMdts match {
-          case Nil => keysMap
-          case _ => {
-            val keys = keysList(childMdts)
-            val afterList = keysMap.get(keys) match {
-              case Some(list) => add(list, childMdts)
-              case _ => childMdts
-            }
-            keysMap + (keys -> afterList)
-          }
-        }
-      }
-      val childrenMergeMaps = childrenKeysMapDatas.values.foldLeft(List[Map[String, _]]()) { (originList, list) =>
-        originList match {
-          case Nil => list
-          case _ => multiply(originList, list)
-        }
-      }
-      val result = mergeNodeChildrenDatasIntoMap(root, childrenMergeMaps)
-      result
-    }
-
-    private def mergeNodeChildrenDatasIntoMap(node: TreeNode, mapDatas: List[Map[String, _]]): List[Map[String, _]] = {
-      val datas: List[(String, (String, Any))] = node.children.flatMap { child =>
-        child.datas.map(dt => (dt._1, (child.key, dt._2)))
-      }
-      val childrenDataKeys: Set[String] = datas.map(_._1).toSet
-      val childrenDataLists: Map[String, List[(String, _)]] = datas.foldLeft(childrenDataKeys.map(k => (k, List[(String, _)]())).toMap) { (maps, data) =>
-        maps.get(data._1) match {
-          case Some(list) => maps + (data._1 -> (list :+ data._2))
-          case _ => maps
-        }
-      }
-
-      // multiply the data lists of the different keys
-      childrenDataLists.foldLeft(mapDatas) { (mdts, klPair) =>
-        val (key, list) = klPair
-        mdts match {
-          case Nil => list.map(pr => Map[String, Any]((key -> pr._2)))
-          case _ => {
-            list.flatMap { kvPair =>
-              val (path, value) = kvPair
-              mdts.map { mp =>
-                mp + (key -> value)
-              }
-            }
-          }
-        }
-      }
-
-    }
-  }
-
-
-
-
-}
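
The removed cartesian first groups values by their path through the tree and then multiplies the
resulting lists of maps, so that every combination of values from different branches appears once.
Ignoring the path tree, the core combination step is a fold over the per-key lists; a simplified
sketch (the function name is illustrative, and dropping the tree-based grouping is an assumption made
only to keep the example short):

    object CartesianSketch {
      // Build every combination of the candidate values: one map per combination.
      // Note: an empty candidate list for any key collapses the result to Nil, which the
      // removed tree-based code avoids by grouping values per path first.
      def simpleCartesian[T](valuesMap: Map[String, List[T]]): List[Map[String, T]] =
        valuesMap.foldLeft(List(Map.empty[String, T])) { case (acc, (key, values)) =>
          for {
            m <- acc
            v <- values
          } yield m + (key -> v)
        }
    }

    // CartesianSketch.simpleCartesian(Map("a" -> List(1, 2), "b" -> List(10)))
    //   == List(Map("a" -> 1, "b" -> 10), Map("a" -> 2, "b" -> 10))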

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/AdaptPhase.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/AdaptPhase.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/AdaptPhase.scala
new file mode 100644
index 0000000..26db78d
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/AdaptPhase.scala
@@ -0,0 +1,25 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.adaptor
+
+sealed trait AdaptPhase {}
+
+final case object PreProcPhase extends AdaptPhase {}
+
+final case object RunPhase extends AdaptPhase {}
\ No newline at end of file
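
AdaptPhase is a small sealed ADT that the rule adaptors below take as a constructor argument; a sealed
trait with case objects lets callers pattern match exhaustively, so the compiler warns if a new phase
is added without being handled. A tiny usage sketch (the describe function is illustrative):

    import org.apache.griffin.measure.rule.adaptor.{AdaptPhase, PreProcPhase, RunPhase}

    object PhaseSketch {
      def describe(phase: AdaptPhase): String = phase match {
        case PreProcPhase => "adapting rules for the pre-process phase"
        case RunPhase     => "adapting rules for the run phase"
      }
    }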

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/DataFrameOprAdaptor.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/DataFrameOprAdaptor.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/DataFrameOprAdaptor.scala
new file mode 100644
index 0000000..eb57838
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/DataFrameOprAdaptor.scala
@@ -0,0 +1,44 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.adaptor
+
+import org.apache.griffin.measure.process.ProcessType
+import org.apache.griffin.measure.rule.step._
+
+case class DataFrameOprAdaptor(adaptPhase: AdaptPhase) extends RuleAdaptor {
+
+  def genRuleStep(param: Map[String, Any]): Seq[RuleStep] = {
+    DfOprStep(getName(param), getRule(param), getDetails(param),
+      getPersistType(param), getUpdateDataSource(param)) :: Nil
+  }
+  def adaptConcreteRuleStep(ruleStep: RuleStep): Seq[ConcreteRuleStep] = {
+    ruleStep match {
+      case rs @ DfOprStep(_, _, _, _, _) => rs :: Nil
+      case _ => Nil
+    }
+  }
+
+  def getTempSourceNames(param: Map[String, Any]): Seq[String] = {
+    param.get(_name) match {
+      case Some(name) => name.toString :: Nil
+      case _ => Nil
+    }
+  }
+
+}
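
An adaptor like this turns one rule's parameter map into rule steps: genRuleStep wraps the rule in a
DfOprStep, and getTempSourceNames reports which temporary table names the step will register. A rough
usage sketch; the parameter keys shown ("name", "rule") are assumptions about what the getName/getRule
helpers of the RuleAdaptor base class read, since that class is not part of this diff:

    import org.apache.griffin.measure.rule.adaptor.{DataFrameOprAdaptor, RunPhase}

    object AdaptorSketch {
      def main(args: Array[String]): Unit = {
        val adaptor = DataFrameOprAdaptor(RunPhase)
        // assumed keys; only for illustration
        val param: Map[String, Any] = Map("name" -> "accuracy_opr", "rule" -> "accuracy")

        val steps      = adaptor.genRuleStep(param)        // one DfOprStep wrapping the rule
        val tempTables = adaptor.getTempSourceNames(param) // Seq("accuracy_opr") if "name" is the key
        println(steps)
        println(tempTables)
      }
    }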

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/GriffinDslAdaptor.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/GriffinDslAdaptor.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/GriffinDslAdaptor.scala
new file mode 100644
index 0000000..2a189d4
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/GriffinDslAdaptor.scala
@@ -0,0 +1,349 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.adaptor
+
+import org.apache.griffin.measure.data.connector.GroupByColumn
+import org.apache.griffin.measure.rule.dsl._
+import org.apache.griffin.measure.rule.dsl.analyzer._
+import org.apache.griffin.measure.rule.dsl.expr._
+import org.apache.griffin.measure.rule.dsl.parser.GriffinDslParser
+import org.apache.griffin.measure.rule.step._
+import org.apache.griffin.measure.utils.ParamUtil._
+
+case class GriffinDslAdaptor(dataSourceNames: Seq[String],
+                             functionNames: Seq[String],
+                             adaptPhase: AdaptPhase
+                            ) extends RuleAdaptor {
+
+  object StepInfo {
+    val _Name = "name"
+    val _PersistType = "persist.type"
+    val _UpdateDataSource = "update.data.source"
+    def getNameOpt(param: Map[String, Any]): Option[String] = param.get(_Name).map(_.toString)
+    def getPersistType(param: Map[String, Any]): PersistType = PersistType(param.getString(_PersistType, ""))
+    def getUpdateDataSourceOpt(param: Map[String, Any]): Option[String] = param.get(_UpdateDataSource).map(_.toString)
+  }
+  object AccuracyInfo {
+    val _Source = "source"
+    val _Target = "target"
+    val _MissRecords = "miss.records"
+    val _Accuracy = "accuracy"
+    val _Miss = "miss"
+    val _Total = "total"
+    val _Matched = "matched"
+  }
+  object ProfilingInfo {
+    val _Source = "source"
+    val _Profiling = "profiling"
+  }
+
+  def getNameOpt(param: Map[String, Any], key: String): Option[String] = param.get(key).map(_.toString)
+  def resultName(param: Map[String, Any], key: String): String = {
+    val nameOpt = param.get(key) match {
+      case Some(prm: Map[String, Any]) => StepInfo.getNameOpt(prm)
+      case _ => None
+    }
+    nameOpt.getOrElse(key)
+  }
+  def resultPersistType(param: Map[String, Any], key: String, defPersistType: PersistType): PersistType = {
+    param.get(key) match {
+      case Some(prm: Map[String, Any]) => StepInfo.getPersistType(prm)
+      case _ => defPersistType
+    }
+  }
+  def resultUpdateDataSourceOpt(param: Map[String, Any], key: String): Option[String] = {
+    param.get(key) match {
+      case Some(prm: Map[String, Any]) => StepInfo.getUpdateDataSourceOpt(prm)
+      case _ => None
+    }
+  }
+
+  val _dqType = "dq.type"
+
+  protected def getDqType(param: Map[String, Any]) = DqType(param.getString(_dqType, ""))
+
+  val filteredFunctionNames = functionNames.filter { fn =>
+    fn.matches("""^[a-zA-Z_]\w*$""")
+  }
+  val parser = GriffinDslParser(dataSourceNames, filteredFunctionNames)
+
+  def genRuleStep(param: Map[String, Any]): Seq[RuleStep] = {
+    GriffinDslStep(getName(param), getRule(param), getDqType(param), getDetails(param)) :: Nil
+  }
+
+  def getTempSourceNames(param: Map[String, Any]): Seq[String] = {
+    val dqType = getDqType(param)
+    param.get(_name) match {
+      case Some(name) => {
+        dqType match {
+          case AccuracyType => {
+            Seq[String](
+              resultName(param, AccuracyInfo._MissRecords),
+              resultName(param, AccuracyInfo._Accuracy)
+            )
+          }
+          case ProfilingType => {
+            Seq[String](
+              resultName(param, ProfilingInfo._Profiling)
+            )
+          }
+          case TimelinessType => {
+            Nil
+          }
+          case _ => Nil
+        }
+      }
+      case _ => Nil
+    }
+  }
+
+  def adaptConcreteRuleStep(ruleStep: RuleStep): Seq[ConcreteRuleStep] = {
+    ruleStep match {
+      case rs @ GriffinDslStep(_, rule, dqType, _) => {
+        val exprOpt = try {
+          val result = parser.parseRule(rule, dqType)
+          if (result.successful) Some(result.get)
+          else {
+            println(result)
+            warn(s"adapt concrete rule step warn: parse rule [ ${rule} ] fails")
+            None
+          }
+        } catch {
+          case e: Throwable => {
+            error(s"adapt concrete rule step error: ${e.getMessage}")
+            None
+          }
+        }
+
+        exprOpt match {
+          case Some(expr) => {
+            try {
+              transConcreteRuleSteps(rs, expr)
+            } catch {
+              case e: Throwable => {
+                error(s"trans concrete rule step error: ${e.getMessage}")
+                Nil
+              }
+            }
+          }
+          case _ => Nil
+        }
+      }
+      case _ => Nil
+    }
+  }
+
+  private def transConcreteRuleSteps(ruleStep: GriffinDslStep, expr: Expr
+                                    ): Seq[ConcreteRuleStep] = {
+    val details = ruleStep.details
+    ruleStep.dqType match {
+      case AccuracyType => {
+        val sourceName = getNameOpt(details, AccuracyInfo._Source) match {
+          case Some(name) => name
+          case _ => dataSourceNames.head
+        }
+        val targetName = getNameOpt(details, AccuracyInfo._Target) match {
+          case Some(name) => name
+          case _ => dataSourceNames.tail.head
+        }
+        val analyzer = AccuracyAnalyzer(expr.asInstanceOf[LogicalExpr], sourceName, targetName)
+
+
+        if (!checkDataSourceExists(sourceName)) {
+          Nil
+        } else {
+          // 1. miss record
+          val missRecordsSql = if (!checkDataSourceExists(targetName)) {
+            val selClause = s"`${sourceName}`.*"
+            s"SELECT ${selClause} FROM `${sourceName}`"
+          } else {
+            val selClause = s"`${sourceName}`.*"
+            val onClause = expr.coalesceDesc
+            val sourceIsNull = analyzer.sourceSelectionExprs.map { sel =>
+              s"${sel.desc} IS NULL"
+            }.mkString(" AND ")
+            val targetIsNull = analyzer.targetSelectionExprs.map { sel =>
+              s"${sel.desc} IS NULL"
+            }.mkString(" AND ")
+            val whereClause = s"(NOT (${sourceIsNull})) AND (${targetIsNull})"
+            s"SELECT ${selClause} FROM `${sourceName}` LEFT JOIN `${targetName}` ON ${onClause} WHERE ${whereClause}"
+          }
+          val missRecordsName = resultName(details, AccuracyInfo._MissRecords)
+          val missRecordsStep = SparkSqlStep(
+            missRecordsName,
+            missRecordsSql,
+            Map[String, Any](),
+            resultPersistType(details, AccuracyInfo._MissRecords, RecordPersistType),
+            resultUpdateDataSourceOpt(details, AccuracyInfo._MissRecords)
+          )
+
+          // 2. miss count
+          val missTableName = "_miss_"
+          val missColName = getNameOpt(details, AccuracyInfo._Miss).getOrElse(AccuracyInfo._Miss)
+          val missSql = {
+            s"SELECT `${GroupByColumn.tmst}` AS `${GroupByColumn.tmst}`, COUNT(*) AS `${missColName}` FROM `${missRecordsName}` GROUP BY `${GroupByColumn.tmst}`"
+          }
+          val missStep = SparkSqlStep(
+            missTableName,
+            missSql,
+            Map[String, Any](),
+            NonePersistType,
+            None
+          )
+
+          // 3. total count
+          val totalTableName = "_total_"
+          val totalColName = getNameOpt(details, AccuracyInfo._Total).getOrElse(AccuracyInfo._Total)
+          val totalSql = {
+            s"SELECT `${GroupByColumn.tmst}` AS `${GroupByColumn.tmst}`, COUNT(*) AS `${totalColName}` FROM `${sourceName}` GROUP BY `${GroupByColumn.tmst}`"
+          }
+          val totalStep = SparkSqlStep(
+            totalTableName,
+            totalSql,
+            Map[String, Any](),
+            NonePersistType,
+            None
+          )
+
+          // 4. accuracy metric
+          val matchedColName = getNameOpt(details, AccuracyInfo._Matched).getOrElse(AccuracyInfo._Matched)
+          val accuracyMetricSql = {
+            s"""
+               |SELECT `${totalTableName}`.`${GroupByColumn.tmst}` AS `${GroupByColumn.tmst}`,
+               |`${missTableName}`.`${missColName}` AS `${missColName}`,
+               |`${totalTableName}`.`${totalColName}` AS `${totalColName}`
+               |FROM `${totalTableName}` FULL JOIN `${missTableName}`
+               |ON `${totalTableName}`.`${GroupByColumn.tmst}` = `${missTableName}`.`${GroupByColumn.tmst}`
+          """.stripMargin
+          }
+          val accuracyMetricName = resultName(details, AccuracyInfo._Accuracy)
+          val accuracyMetricStep = SparkSqlStep(
+            accuracyMetricName,
+            accuracyMetricSql,
+            details,
+            //          resultPersistType(details, AccuracyInfo._Accuracy, MetricPersistType)
+            NonePersistType,
+            None
+          )
+
+          // 5. accuracy metric filter
+          val accuracyStep = DfOprStep(
+            accuracyMetricName,
+            "accuracy",
+            Map[String, Any](
+              ("df.name" -> accuracyMetricName),
+              ("miss" -> missColName),
+              ("total" -> totalColName),
+              ("matched" -> matchedColName),
+              ("tmst" -> GroupByColumn.tmst)
+            ),
+            resultPersistType(details, AccuracyInfo._Accuracy, MetricPersistType),
+            None
+          )
+
+          missRecordsStep :: missStep :: totalStep :: accuracyMetricStep :: accuracyStep :: Nil
+        }
+      }
+      case ProfilingType => {
+        val sourceName = getNameOpt(details, ProfilingInfo._Source) match {
+          case Some(name) => name
+          case _ => dataSourceNames.head
+        }
+        val analyzer = ProfilingAnalyzer(expr.asInstanceOf[ProfilingClause], sourceName)
+
+        analyzer.selectionExprs.foreach(println)
+
+        val selExprDescs = analyzer.selectionExprs.map { sel =>
+          val alias = sel match {
+            case s: AliasableExpr if (s.alias.nonEmpty) => s" AS `${s.alias.get}`"
+            case _ => ""
+          }
+          s"${sel.desc}${alias}"
+        }
+
+//        val selClause = (s"`${GroupByColumn.tmst}`" +: selExprDescs).mkString(", ")
+        val selClause = if (analyzer.containsAllSelectionExpr) {
+          selExprDescs.mkString(", ")
+        } else {
+          (s"`${GroupByColumn.tmst}`" +: selExprDescs).mkString(", ")
+        }
+
+//        val tailClause = analyzer.tailsExprs.map(_.desc).mkString(" ")
+        val tmstGroupbyClause = GroupbyClause(LiteralStringExpr(s"`${GroupByColumn.tmst}`") :: Nil, None)
+        val mergedGroupbyClause = tmstGroupbyClause.merge(analyzer.groupbyExprOpt match {
+          case Some(gbc) => gbc
+          case _ => GroupbyClause(Nil, None)
+        })
+        val groupbyClause = mergedGroupbyClause.desc
+        val preGroupbyClause = analyzer.preGroupbyExprs.map(_.desc).mkString(" ")
+        val postGroupbyClause = analyzer.postGroupbyExprs.map(_.desc).mkString(" ")
+
+        if (!checkDataSourceExists(sourceName)) {
+          Nil
+        } else {
+          // 1. select statement
+          val profilingSql = {
+//            s"SELECT `${GroupByColumn.tmst}`, ${selClause} FROM ${sourceName} ${tailClause} GROUP BY `${GroupByColumn.tmst}`"
+            s"SELECT ${selClause} FROM ${sourceName} ${preGroupbyClause} ${groupbyClause} ${postGroupbyClause}"
+          }
+          val profilingMetricName = resultName(details, ProfilingInfo._Profiling)
+          val profilingStep = SparkSqlStep(
+            profilingMetricName,
+            profilingSql,
+            details,
+            resultPersistType(details, ProfilingInfo._Profiling, MetricPersistType),
+            None
+          )
+
+          // 2. clear processed data
+//          val clearDataSourceStep = DfOprStep(
+//            s"${sourceName}_clear",
+//            "clear",
+//            Map[String, Any](
+//              ("df.name" -> sourceName)
+//            ),
+//            NonePersistType,
+//            Some(sourceName)
+//          )
+//
+//          profilingStep :: clearDataSourceStep :: Nil
+
+          profilingStep :: Nil
+        }
+
+      }
+      case TimelinessType => {
+        Nil
+      }
+      case _ => Nil
+    }
+  }
+
+  private def checkDataSourceExists(name: String): Boolean = {
+    try {
+      RuleAdaptorGroup.dataChecker.existDataSourceName(name)
+    } catch {
+      case e: Throwable => {
+        error(s"check data source exists error: ${e.getMessage}")
+        false
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/RuleAdaptor.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/RuleAdaptor.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/RuleAdaptor.scala
new file mode 100644
index 0000000..744f52a
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/RuleAdaptor.scala
@@ -0,0 +1,72 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.adaptor
+
+import java.util.concurrent.atomic.AtomicLong
+
+
+import scala.collection.mutable.{Set => MutableSet}
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.rule.step.{ConcreteRuleStep, RuleStep}
+import org.apache.griffin.measure.rule.dsl.{DslType, PersistType}
+
+trait RuleAdaptor extends Loggable with Serializable {
+
+  val adaptPhase: AdaptPhase
+
+  val _name = "name"
+  val _rule = "rule"
+  val _persistType = "persist.type"
+  val _updateDataSource = "update.data.source"
+  val _details = "details"
+
+  protected def getName(param: Map[String, Any]) = param.getOrElse(_name, RuleStepNameGenerator.genName).toString
+  protected def getRule(param: Map[String, Any]) = param.getOrElse(_rule, "").toString
+  protected def getPersistType(param: Map[String, Any]) = PersistType(param.getOrElse(_persistType, "").toString)
+  protected def getUpdateDataSource(param: Map[String, Any]) = param.get(_updateDataSource).map(_.toString)
+  protected def getDetails(param: Map[String, Any]) = param.get(_details) match {
+    case Some(dt: Map[String, Any]) => dt
+    case _ => Map[String, Any]()
+  }
+
+  def getTempSourceNames(param: Map[String, Any]): Seq[String]
+
+  def genRuleStep(param: Map[String, Any]): Seq[RuleStep]
+  def genConcreteRuleStep(param: Map[String, Any]): Seq[ConcreteRuleStep] = {
+    genRuleStep(param).flatMap { rs =>
+      adaptConcreteRuleStep(rs)
+    }
+  }
+  protected def adaptConcreteRuleStep(ruleStep: RuleStep): Seq[ConcreteRuleStep]
+
+}
+
+object RuleStepNameGenerator {
+  private val counter: AtomicLong = new AtomicLong(0L)
+  private val head: String = "rs"
+
+  def genName: String = {
+    s"${head}${increment}"
+  }
+
+  private def increment: Long = {
+    counter.incrementAndGet()
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/RuleAdaptorGroup.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/RuleAdaptorGroup.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/RuleAdaptorGroup.scala
new file mode 100644
index 0000000..237902a
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/RuleAdaptorGroup.scala
@@ -0,0 +1,105 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.adaptor
+
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.process.ProcessType
+import org.apache.griffin.measure.process.check.DataChecker
+import org.apache.griffin.measure.rule.dsl._
+import org.apache.griffin.measure.rule.step._
+import org.apache.spark.sql.SQLContext
+
+import scala.collection.mutable.{Map => MutableMap}
+
+object RuleAdaptorGroup {
+
+  val _dslType = "dsl.type"
+
+  var dataSourceNames: Seq[String] = _
+  var functionNames: Seq[String] = _
+
+  var dataChecker: DataChecker = _
+
+  def init(sqlContext: SQLContext, dsNames: Seq[String]): Unit = {
+    val functions = sqlContext.sql("show functions")
+    functionNames = functions.map(_.getString(0)).collect
+    dataSourceNames = dsNames
+
+    dataChecker = DataChecker(sqlContext)
+  }
+
+  private def getDslType(param: Map[String, Any], defDslType: DslType) = {
+    val dt = DslType(param.getOrElse(_dslType, "").toString)
+    dt match {
+      case UnknownDslType => defDslType
+      case _ => dt
+    }
+  }
+
+  private def genRuleAdaptor(dslType: DslType, dsNames: Seq[String], adaptPhase: AdaptPhase): Option[RuleAdaptor] = {
+    dslType match {
+      case SparkSqlType => Some(SparkSqlAdaptor(adaptPhase))
+      case DfOprType => Some(DataFrameOprAdaptor(adaptPhase))
+      case GriffinDslType => Some(GriffinDslAdaptor(dsNames, functionNames, adaptPhase))
+      case _ => None
+    }
+  }
+
+//  def genRuleSteps(evaluateRuleParam: EvaluateRuleParam): Seq[RuleStep] = {
+//    val dslTypeStr = if (evaluateRuleParam.dslType == null) "" else evaluateRuleParam.dslType
+//    val defaultDslType = DslType(dslTypeStr)
+//    val rules = evaluateRuleParam.rules
+//    var dsNames = dataSourceNames
+//    val steps = rules.flatMap { param =>
+//      val dslType = getDslType(param)
+//      genRuleAdaptor(dslType) match {
+//        case Some(ruleAdaptor) => ruleAdaptor.genRuleStep(param)
+//        case _ => Nil
+//      }
+//    }
+//    steps.foreach(println)
+//    steps
+//  }
+
+  def genConcreteRuleSteps(evaluateRuleParam: EvaluateRuleParam,
+                           adaptPhase: AdaptPhase
+                          ): Seq[ConcreteRuleStep] = {
+    val dslTypeStr = if (evaluateRuleParam.dslType == null) "" else evaluateRuleParam.dslType
+    val defaultDslType = DslType(dslTypeStr)
+    val ruleParams = evaluateRuleParam.rules
+    genConcreteRuleSteps(ruleParams, defaultDslType, adaptPhase)
+  }
+
+  def genConcreteRuleSteps(ruleParams: Seq[Map[String, Any]],
+                           defDslType: DslType, adaptPhase: AdaptPhase
+                          ): Seq[ConcreteRuleStep] = {
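+    // fold over the rule params in order, threading the growing list of temp table names
+    // so that later rules can reference tables produced by earlier ones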
+    val (steps, dsNames) = ruleParams.foldLeft((Seq[ConcreteRuleStep](), dataSourceNames)) { (res, param) =>
+      val (preSteps, preNames) = res
+      val dslType = getDslType(param, defDslType)
+      val (curSteps, curNames) = genRuleAdaptor(dslType, preNames, adaptPhase) match {
+        case Some(ruleAdaptor) => (ruleAdaptor.genConcreteRuleStep(param), preNames ++ ruleAdaptor.getTempSourceNames(param))
+        case _ => (Nil, preNames)
+      }
+      (preSteps ++ curSteps, curNames)
+    }
+    steps
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/SparkSqlAdaptor.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/SparkSqlAdaptor.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/SparkSqlAdaptor.scala
new file mode 100644
index 0000000..78121fa
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/adaptor/SparkSqlAdaptor.scala
@@ -0,0 +1,54 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.adaptor
+
+import org.apache.griffin.measure.data.connector.GroupByColumn
+import org.apache.griffin.measure.rule.step._
+
+case class SparkSqlAdaptor(adaptPhase: AdaptPhase) extends RuleAdaptor {
+
+  def genRuleStep(param: Map[String, Any]): Seq[RuleStep] = {
+    SparkSqlStep(getName(param), getRule(param), getDetails(param),
+      getPersistType(param), getUpdateDataSource(param)) :: Nil
+  }
+  def adaptConcreteRuleStep(ruleStep: RuleStep): Seq[ConcreteRuleStep] = {
+    ruleStep match {
+      case rs @ SparkSqlStep(name, rule, details, persistType, udsOpt) => {
+        adaptPhase match {
+          case PreProcPhase => rs :: Nil
+          case RunPhase => {
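+            // at run phase, prepend the tmst column to the SELECT list and group by it,
+            // so the user's SQL yields one result row per timestamp batch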
+            val repSel = rule.replaceFirst("(?i)select", s"SELECT `${GroupByColumn.tmst}` AS `${GroupByColumn.tmst}`,")
+            val groupbyRule = repSel.concat(s" GROUP BY `${GroupByColumn.tmst}`")
+            val nrs = SparkSqlStep(name, groupbyRule, details, persistType, udsOpt)
+            nrs :: Nil
+          }
+        }
+      }
+      case _ => Nil
+    }
+  }
+
+  def getTempSourceNames(param: Map[String, Any]): Seq[String] = {
+    param.get(_name) match {
+      case Some(name) => name.toString :: Nil
+      case _ => Nil
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/DqType.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/DqType.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/DqType.scala
new file mode 100644
index 0000000..ac27403
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/DqType.scala
@@ -0,0 +1,58 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl
+
+import scala.util.matching.Regex
+
+
+sealed trait DqType {
+  val regex: Regex
+  val desc: String
+}
+
+object DqType {
+  private val dqTypes: List[DqType] = List(AccuracyType, ProfilingType, TimelinessType, UnknownType)
+  def apply(ptn: String): DqType = {
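+    // match the input string against each dq type's regex, falling back to UnknownType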
+    dqTypes.filter(tp => ptn match {
+      case tp.regex() => true
+      case _ => false
+    }).headOption.getOrElse(UnknownType)
+  }
+  def unapply(pt: DqType): Option[String] = Some(pt.desc)
+}
+
+final case object AccuracyType extends DqType {
+  val regex = "^(?i)accuracy$".r
+  val desc = "accuracy"
+}
+
+final case object ProfilingType extends DqType {
+  val regex = "^(?i)profiling$".r
+  val desc = "profiling$"
+}
+
+final case object TimelinessType extends DqType {
+  val regex = "^(?i)timeliness$".r
+  val desc = "timeliness"
+}
+
+final case object UnknownType extends DqType {
+  val regex = "".r
+  val desc = "unknown"
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/DslType.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/DslType.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/DslType.scala
new file mode 100644
index 0000000..cfda393
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/DslType.scala
@@ -0,0 +1,58 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl
+
+import scala.util.matching.Regex
+
+
+sealed trait DslType {
+  val regex: Regex
+  val desc: String
+}
+
+object DslType {
+  private val dslTypes: List[DslType] = List(SparkSqlType, GriffinDslType, DfOprType, UnknownDslType)
+  def apply(ptn: String): DslType = {
+    dslTypes.filter(tp => ptn match {
+      case tp.regex() => true
+      case _ => false
+    }).headOption.getOrElse(UnknownDslType)
+  }
+  def unapply(pt: DslType): Option[String] = Some(pt.desc)
+}
+
+final case object SparkSqlType extends DslType {
+  val regex = "^(?i)spark-?sql$".r
+  val desc = "spark-sql"
+}
+
+final case object DfOprType extends DslType {
+  val regex = "^(?i)df-?opr$".r
+  val desc = "df-opr"
+}
+
+final case object GriffinDslType extends DslType {
+  val regex = "^(?i)griffin-?dsl$".r
+  val desc = "griffin-dsl"
+}
+
+final case object UnknownDslType extends DslType {
+  val regex = "".r
+  val desc = "unknown"
+}
\ No newline at end of file



[05/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/MathExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/MathExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/MathExpr.scala
deleted file mode 100644
index 661e8f4..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/MathExpr.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-import org.apache.griffin.measure.rule.CalculationUtil._
-import org.apache.griffin.measure.rule.DataTypeCalculationUtil._
-import org.apache.spark.sql.types.DataType
-
-trait MathExpr extends Expr {
-
-}
-
-case class MathFactorExpr(self: Expr) extends MathExpr {
-  def calculateOnly(values: Map[String, Any]): Option[Any] = self.calculate(values)
-  val desc: String = self.desc
-  val dataSources: Set[String] = self.dataSources
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    self.getCacheExprs(ds)
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    self.getFinalCacheExprs(ds)
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    self.getPersistExprs(ds)
-  }
-}
-
-case class UnaryMathExpr(oprList: Iterable[String], factor: Expr) extends MathExpr {
-  private val (posOpr, negOpr) = ("+", "-")
-  def calculateOnly(values: Map[String, Any]): Option[Any] = {
-    val fv = factor.calculate(values)
-    oprList.foldRight(fv) { (opr, v) =>
-      opr match {
-        case this.posOpr => v
-        case this.negOpr => -v
-        case _ => None
-      }
-    }
-  }
-  val desc: String = oprList.foldRight(factor.desc) { (prev, ex) => s"${prev}${ex}" }
-  val dataSources: Set[String] = factor.dataSources
-  override def cacheUnit: Boolean = true
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    factor.getCacheExprs(ds)
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    factor.getFinalCacheExprs(ds)
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    factor.getPersistExprs(ds)
-  }
-}
-
-case class BinaryMathExpr(first: MathExpr, others: Iterable[(String, MathExpr)]) extends MathExpr {
-  private val (addOpr, subOpr, mulOpr, divOpr, modOpr) = ("+", "-", "*", "/", "%")
-  def calculateOnly(values: Map[String, Any]): Option[Any] = {
-    val fv = first.calculate(values)
-    others.foldLeft(fv) { (v, pair) =>
-      val (opr, next) = pair
-      val nv = next.calculate(values)
-      opr match {
-        case this.addOpr => v + nv
-        case this.subOpr => v - nv
-        case this.mulOpr => v * nv
-        case this.divOpr => v / nv
-        case this.modOpr => v % nv
-        case _ => None
-      }
-    }
-  }
-  val desc: String = others.foldLeft(first.desc) { (ex, next) => s"${ex} ${next._1} ${next._2.desc}" }
-  val dataSources: Set[String] = first.dataSources ++ others.flatMap(_._2.dataSources).toSet
-  override def cacheUnit: Boolean = true
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    first.getCacheExprs(ds) ++ others.flatMap(_._2.getCacheExprs(ds))
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    first.getFinalCacheExprs(ds) ++ others.flatMap(_._2.getFinalCacheExprs(ds))
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    first.getPersistExprs(ds) ++ others.flatMap(_._2.getPersistExprs(ds))
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/SelectExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/SelectExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/SelectExpr.scala
deleted file mode 100644
index 5b7f1b0..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/SelectExpr.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-import org.apache.spark.sql.types.DataType
-import org.apache.griffin.measure.rule.CalculationUtil._
-
-trait SelectExpr extends Expr {
-  def calculateOnly(values: Map[String, Any]): Option[Any] = None
-}
-
-case class IndexFieldRangeSelectExpr(fields: Iterable[FieldDescOnly]) extends SelectExpr {
-  val desc: String = s"[${fields.map(_.desc).mkString(", ")}]"
-  val dataSources: Set[String] = Set.empty[String]
-}
-
-case class FunctionOperationExpr(func: String, args: Iterable[MathExpr]) extends SelectExpr {
-  val desc: String = s".${func}(${args.map(_.desc).mkString(", ")})"
-  val dataSources: Set[String] = args.flatMap(_.dataSources).toSet
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = args.flatMap(_.getCacheExprs(ds))
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = args.flatMap(_.getFinalCacheExprs(ds))
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = args.flatMap(_.getPersistExprs(ds))
-}
-
-case class FilterSelectExpr(field: FieldDesc, compare: String, value: MathExpr) extends SelectExpr {
-  val desc: String = s"[${field.desc} ${compare} ${value.desc}]"
-  val dataSources: Set[String] = value.dataSources
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = value.getCacheExprs(ds)
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = value.getFinalCacheExprs(ds)
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = value.getPersistExprs(ds)
-  private val (eqOpr, neqOpr, btOpr, bteOpr, ltOpr, lteOpr) = ("""==?""".r, """!==?""".r, ">", ">=", "<", "<=")
-  override def calculateOnly(values: Map[String, Any]): Option[Any] = {
-    val (lv, rv) = (values.get(fieldKey), value.calculate(values))
-    compare match {
-      case this.eqOpr() => lv === rv
-      case this.neqOpr() => lv =!= rv
-      case this.btOpr => lv > rv
-      case this.bteOpr => lv >= rv
-      case this.ltOpr => lv < rv
-      case this.lteOpr => lv <= rv
-      case _ => None
-    }
-  }
-  def fieldKey: String = s"__${field.field}"
-}
-
-// -- selection --
-case class SelectionExpr(head: SelectionHead, selectors: Iterable[SelectExpr]) extends Expr {
-  def calculateOnly(values: Map[String, Any]): Option[Any] = values.get(_id)
-
-  val desc: String = {
-    val argsString = selectors.map(_.desc).mkString("")
-    s"${head.desc}${argsString}"
-  }
-  val dataSources: Set[String] = {
-    val selectorDataSources = selectors.flatMap(_.dataSources).toSet
-    selectorDataSources + head.head
-  }
-
-  override def cacheUnit: Boolean = true
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    selectors.flatMap(_.getCacheExprs(ds))
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    selectors.flatMap(_.getFinalCacheExprs(ds))
-  }
-
-  override def persistUnit: Boolean = true
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    selectors.flatMap(_.getPersistExprs(ds))
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/func/DefaultFunctionDefine.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/func/DefaultFunctionDefine.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/func/DefaultFunctionDefine.scala
deleted file mode 100644
index 15161c3..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/func/DefaultFunctionDefine.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.func
-
-import org.apache.griffin.measure.utils.JsonUtil
-
-class DefaultFunctionDefine extends FunctionDefine {
-
-  def json(strOpt: Option[_]): Map[String, Any] = {
-    try {
-      strOpt match {
-        case Some(str: String) => JsonUtil.toAnyMap(str)
-        case _ => throw new Exception("json function param should be string")
-      }
-    } catch {
-      case e: Throwable => throw e
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/func/FunctionDefine.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/func/FunctionDefine.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/func/FunctionDefine.scala
deleted file mode 100644
index d23fc7a..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/func/FunctionDefine.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.func
-
-trait FunctionDefine extends Serializable {
-
-}
-
-class UnKnown {}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/func/FunctionUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/func/FunctionUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/func/FunctionUtil.scala
deleted file mode 100644
index 57e934d..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/func/FunctionUtil.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.func
-
-import java.lang.reflect.Method
-
-import org.apache.griffin.measure.log.Loggable
-
-import scala.collection.mutable.{Map => MutableMap}
-
-object FunctionUtil extends Loggable {
-
-  val functionDefines: MutableMap[String, FunctionDefine] = MutableMap[String, FunctionDefine]()
-
-  registerFunctionDefine(Array(classOf[DefaultFunctionDefine].getCanonicalName))
-
-  def registerFunctionDefine(classes: Iterable[String]): Unit = {
-    for (cls <- classes) {
-      try {
-        val clz: Class[_] = Class.forName(cls)
-        if (classOf[FunctionDefine].isAssignableFrom(clz)) {
-          functionDefines += (cls -> clz.newInstance.asInstanceOf[FunctionDefine])
-        } else {
-          warn(s"${cls} register fails: ${cls} is not sub class of ${classOf[FunctionDefine].getCanonicalName}")
-        }
-      } catch {
-        case e: Throwable => warn(s"${cls} register fails: ${e.getMessage}")
-      }
-    }
-  }
-
-  def invoke(methodName: String, params: Array[Option[Any]]): Seq[Option[Any]] = {
-//    val paramTypes = params.map { param =>
-//      try {
-//        param match {
-//          case Some(v) => v.getClass
-//          case _ => classOf[UnKnown]
-//        }
-//      } catch {
-//        case e: Throwable => classOf[UnKnown]
-//      }
-//    }
-    val paramTypes = params.map(a => classOf[Option[_]])
-
-    functionDefines.values.foldLeft(Nil: Seq[Option[Any]]) { (res, funcDef) =>
-      if (res.isEmpty) {
-        val clz = funcDef.getClass
-        try {
-          val method = clz.getMethod(methodName, paramTypes: _*)
-          Seq(Some(method.invoke(funcDef, params: _*)))
-        } catch {
-          case e: Throwable => res
-        }
-      } else res
-    }
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/preproc/PreProcRuleGenerator.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/preproc/PreProcRuleGenerator.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/preproc/PreProcRuleGenerator.scala
new file mode 100644
index 0000000..22d64d8
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/preproc/PreProcRuleGenerator.scala
@@ -0,0 +1,72 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.preproc
+
+object PreProcRuleGenerator {
+
+  val _name = "name"
+
+  def genPreProcRules(rules: Seq[Map[String, Any]], suffix: String): Seq[Map[String, Any]] = {
+    if (rules == null) Nil else {
+      rules.map { rule =>
+        genPreProcRule(rule, suffix)
+      }
+    }
+  }
+
+  def getRuleNames(rules: Seq[Map[String, Any]]): Seq[String] = {
+    if (rules == null) Nil else {
+      rules.flatMap { rule =>
+        rule.get(_name) match {
+          case Some(s: String) => Some(s)
+          case _ => None
+        }
+      }
+    }
+  }
+
+  private def genPreProcRule(param: Map[String, Any], suffix: String
+                            ): Map[String, Any] = {
+    val keys = param.keys
+    keys.foldLeft(param) { (map, key) =>
+      map.get(key) match {
+        case Some(s: String) => map + (key -> genNewString(s, suffix))
+        case Some(subMap: Map[String, Any]) => map + (key -> genPreProcRule(subMap, suffix))
+        case Some(arr: Seq[_]) => map + (key -> genPreProcRule(arr, suffix))
+        case _ => map
+      }
+    }
+  }
+
+  private def genPreProcRule(paramArr: Seq[Any], suffix: String): Seq[Any] = {
+    paramArr.foldLeft(Nil: Seq[Any]) { (res, param) =>
+      param match {
+        case s: String => res :+ genNewString(s, suffix)
+        case map: Map[String, Any] => res :+ genPreProcRule(map, suffix)
+        case arr: Seq[_] => res :+ genPreProcRule(arr, suffix)
+        case _ => res :+ param
+      }
+    }
+  }
+
+  private def genNewString(str: String, suffix: String): String = {
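+    // rewrite "${name}" placeholders into "name_<suffix>", giving each data source its own temp table names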
+    str.replaceAll("""\$\{(.*)\}""", s"$$1_${suffix}")
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/step/ConcreteRuleStep.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/step/ConcreteRuleStep.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/step/ConcreteRuleStep.scala
new file mode 100644
index 0000000..4b3a4d4
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/step/ConcreteRuleStep.scala
@@ -0,0 +1,37 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.step
+
+import org.apache.griffin.measure.rule.dsl._
+
+trait ConcreteRuleStep extends RuleStep {
+
+  val persistType: PersistType
+
+  val updateDataSource: Option[String]
+
+//  def isGroupMetric: Boolean = {
+//    val _GroupMetric = "group.metric"
+//    details.get(_GroupMetric) match {
+//      case Some(b: Boolean) => b
+//      case _ => false
+//    }
+//  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/step/DfOprStep.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/step/DfOprStep.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/step/DfOprStep.scala
new file mode 100644
index 0000000..86f0bf3
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/step/DfOprStep.scala
@@ -0,0 +1,29 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.step
+
+import org.apache.griffin.measure.rule.dsl._
+
+case class DfOprStep(name: String, rule: String, details: Map[String, Any],
+                     persistType: PersistType, updateDataSource: Option[String]
+                    ) extends ConcreteRuleStep {
+
+  val dslType: DslType = DfOprType
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/step/GriffinDslStep.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/step/GriffinDslStep.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/step/GriffinDslStep.scala
new file mode 100644
index 0000000..21db8cf
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/step/GriffinDslStep.scala
@@ -0,0 +1,28 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.step
+
+import org.apache.griffin.measure.rule.dsl._
+
+case class GriffinDslStep(name: String, rule: String, dqType: DqType, details: Map[String, Any]
+                         ) extends RuleStep {
+
+  val dslType: DslType = GriffinDslType
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/step/RuleStep.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/step/RuleStep.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/step/RuleStep.scala
new file mode 100644
index 0000000..4675ffe
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/step/RuleStep.scala
@@ -0,0 +1,31 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.step
+
+import org.apache.griffin.measure.rule.dsl.{DslType, PersistType}
+
+trait RuleStep extends Serializable {
+
+  val dslType: DslType
+
+  val name: String
+  val rule: String
+  val details: Map[String, Any]
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/step/SparkSqlStep.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/step/SparkSqlStep.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/step/SparkSqlStep.scala
new file mode 100644
index 0000000..62c3c35
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/step/SparkSqlStep.scala
@@ -0,0 +1,30 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.step
+
+import org.apache.griffin.measure.persist._
+import org.apache.griffin.measure.rule.dsl._
+
+case class SparkSqlStep(name: String, rule: String, details: Map[String, Any],
+                        persistType: PersistType, updateDataSource: Option[String]
+                       ) extends ConcreteRuleStep {
+
+  val dslType: DslType = SparkSqlType
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/udf/GriffinUdfs.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/udf/GriffinUdfs.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/udf/GriffinUdfs.scala
new file mode 100644
index 0000000..11e8c8f
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/udf/GriffinUdfs.scala
@@ -0,0 +1,33 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.udf
+
+import org.apache.spark.sql.SQLContext
+
+object GriffinUdfs {
+
+  def register(sqlContext: SQLContext): Unit = {
+    sqlContext.udf.register("index_of", indexOf)
+  }
+
+  private val indexOf = (arr: Seq[String], v: String) => {
+    arr.indexOf(v)
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsFileDumpUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsFileDumpUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsFileDumpUtil.scala
index 8a608ff..416f567 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsFileDumpUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsFileDumpUtil.scala
@@ -68,7 +68,7 @@ object HdfsFileDumpUtil {
 
   def remove(path: String, filename: String, withSuffix: Boolean): Unit = {
     if (withSuffix) {
-      val files = HdfsUtil.listSubPaths(path, "file")
+      val files = HdfsUtil.listSubPathsByType(path, "file")
       val patternFiles = files.filter(samePattern(_, filename))
       patternFiles.foreach { f =>
         val rmPath = HdfsUtil.getHdfsFilePath(path, f)

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
index 6dd54b7..9fa6bcf 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
@@ -18,10 +18,11 @@ under the License.
 */
 package org.apache.griffin.measure.utils
 
+import org.apache.griffin.measure.log.Loggable
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
 
-object HdfsUtil {
+object HdfsUtil extends Loggable {
 
   private val seprator = "/"
 
@@ -32,8 +33,17 @@ object HdfsUtil {
   private val dfs = FileSystem.get(conf)
 
   def existPath(filePath: String): Boolean = {
-    val path = new Path(filePath)
-    dfs.exists(path)
+    try {
+      val path = new Path(filePath)
+      dfs.exists(path)
+    } catch {
+      case e: Throwable => false
+    }
+  }
+
+  def existFileInDir(dirPath: String, fileName: String): Boolean = {
+    val filePath = getHdfsFilePath(dirPath, fileName)
+    existPath(filePath)
   }
 
   def createFile(filePath: String): FSDataOutputStream = {
@@ -75,8 +85,12 @@ object HdfsUtil {
   }
 
   def deleteHdfsPath(dirPath: String): Unit = {
-    val path = new Path(dirPath)
-    if (dfs.exists(path)) dfs.delete(path, true)
+    try {
+      val path = new Path(dirPath)
+      if (dfs.exists(path)) dfs.delete(path, true)
+    } catch {
+      case e: Throwable => error(s"delete path [${dirPath}] error: ${e.getMessage}")
+    }
   }
 
 //  def listPathFiles(dirPath: String): Iterable[String] = {
@@ -96,25 +110,38 @@ object HdfsUtil {
 //    }
 //  }
 
-  def listSubPaths(dirPath: String, subType: String, fullPath: Boolean = false): Iterable[String] = {
-    val path = new Path(dirPath)
-    try {
-      val fileStatusArray = dfs.listStatus(path)
-      fileStatusArray.filter { fileStatus =>
-        subType match {
-          case "dir" => fileStatus.isDirectory
-          case "file" => fileStatus.isFile
-          case _ => true
+  def listSubPathsByType(dirPath: String, subType: String, fullPath: Boolean = false): Iterable[String] = {
+    if (existPath(dirPath)) {
+      try {
+        val path = new Path(dirPath)
+        val fileStatusArray = dfs.listStatus(path)
+        fileStatusArray.filter { fileStatus =>
+          subType match {
+            case "dir" => fileStatus.isDirectory
+            case "file" => fileStatus.isFile
+            case _ => true
+          }
+        }.map { fileStatus =>
+          val fname = fileStatus.getPath.getName
+          if (fullPath) getHdfsFilePath(dirPath, fname) else fname
+        }
+      } catch {
+        case e: Throwable => {
+          warn(s"list path [${dirPath}] warn: ${e.getMessage}")
+          Nil
         }
-      }.map { fileStatus =>
-        val fname = fileStatus.getPath.getName
-        if (fullPath) getHdfsFilePath(dirPath, fname) else fname
-      }
-    } catch {
-      case e: Throwable => {
-        println(s"list path files error: ${e.getMessage}")
-        Nil
       }
+    } else Nil
+  }
+
+  def listSubPathsByTypes(dirPath: String, subTypes: Iterable[String], fullPath: Boolean = false): Iterable[String] = {
+    subTypes.flatMap { subType =>
+      listSubPathsByType(dirPath, subType, fullPath)
     }
   }
+
+  def fileNameFromPath(filePath: String): String = {
+    val path = new Path(filePath)
+    path.getName
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala
new file mode 100644
index 0000000..7954b6d
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala
@@ -0,0 +1,164 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.utils
+
+object ParamUtil {
+
+  implicit class ParamMap(params: Map[String, Any]) {
+    def getAny(key: String, defValue: Any): Any = {
+      params.get(key) match {
+        case Some(v) => v
+        case _ => defValue
+      }
+    }
+
+    def getAnyRef[T](key: String, defValue: T)(implicit m: Manifest[T]): T = {
+      params.get(key) match {
+        case Some(v: T) => v
+        case _ => defValue
+      }
+    }
+
+    def getString(key: String, defValue: String): String = {
+      try {
+        params.get(key) match {
+          case Some(v: String) => v.toString
+          case Some(v) => v.toString
+          case _ => defValue
+        }
+      } catch {
+        case _: Throwable => defValue
+      }
+    }
+
+    def getByte(key: String, defValue: Byte): Byte = {
+      try {
+        params.get(key) match {
+          case Some(v: String) => v.toByte
+          case Some(v: Byte) => v.toByte
+          case Some(v: Short) => v.toByte
+          case Some(v: Int) => v.toByte
+          case Some(v: Long) => v.toByte
+          case Some(v: Float) => v.toByte
+          case Some(v: Double) => v.toByte
+          case _ => defValue
+        }
+      } catch {
+        case _: Throwable => defValue
+      }
+    }
+
+    def getShort(key: String, defValue: Short): Short = {
+      try {
+        params.get(key) match {
+          case Some(v: String) => v.toShort
+          case Some(v: Byte) => v.toShort
+          case Some(v: Short) => v.toShort
+          case Some(v: Int) => v.toShort
+          case Some(v: Long) => v.toShort
+          case Some(v: Float) => v.toShort
+          case Some(v: Double) => v.toShort
+          case _ => defValue
+        }
+      } catch {
+        case _: Throwable => defValue
+      }
+    }
+
+    def getInt(key: String, defValue: Int): Int = {
+      try {
+        params.get(key) match {
+          case Some(v: String) => v.toInt
+          case Some(v: Byte) => v.toInt
+          case Some(v: Short) => v.toInt
+          case Some(v: Int) => v.toInt
+          case Some(v: Long) => v.toInt
+          case Some(v: Float) => v.toInt
+          case Some(v: Double) => v.toInt
+          case _ => defValue
+        }
+      } catch {
+        case _: Throwable => defValue
+      }
+    }
+
+    def getLong(key: String, defValue: Long): Long = {
+      try {
+        params.get(key) match {
+          case Some(v: String) => v.toLong
+          case Some(v: Byte) => v.toLong
+          case Some(v: Short) => v.toLong
+          case Some(v: Int) => v.toLong
+          case Some(v: Long) => v.toLong
+          case Some(v: Float) => v.toLong
+          case Some(v: Double) => v.toLong
+          case _ => defValue
+        }
+      } catch {
+        case _: Throwable => defValue
+      }
+    }
+
+    def getFloat(key: String, defValue: Float): Float = {
+      try {
+        params.get(key) match {
+          case Some(v: String) => v.toFloat
+          case Some(v: Byte) => v.toFloat
+          case Some(v: Short) => v.toFloat
+          case Some(v: Int) => v.toFloat
+          case Some(v: Long) => v.toFloat
+          case Some(v: Float) => v.toFloat
+          case Some(v: Double) => v.toFloat
+          case _ => defValue
+        }
+      } catch {
+        case _: Throwable => defValue
+      }
+    }
+
+    def getDouble(key: String, defValue: Double): Double = {
+      try {
+        params.get(key) match {
+          case Some(v: String) => v.toDouble
+          case Some(v: Byte) => v.toDouble
+          case Some(v: Short) => v.toDouble
+          case Some(v: Int) => v.toDouble
+          case Some(v: Long) => v.toDouble
+          case Some(v: Float) => v.toDouble
+          case Some(v: Double) => v.toDouble
+          case _ => defValue
+        }
+      } catch {
+        case _: Throwable => defValue
+      }
+    }
+
+    def getBoolean(key: String, defValue: Boolean): Boolean = {
+      try {
+        params.get(key) match {
+          case Some(v: String) => v.toBoolean
+          case Some(v: Boolean) => v
+          case _ => defValue
+        }
+      } catch {
+        case _: Throwable => defValue
+      }
+    }
+  }
+
+}
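
Note: these typed getters silently fall back to the supplied default when a key is missing or the value cannot be coerced. A minimal, self-contained sketch of the same idea over a plain Map[String, Any] (the object name, keys, and values below are made up for illustration and are not part of this commit):

import scala.util.Try

object ParamGetterSketch {
  // hypothetical values, standing in for one parsed "config" block of a data connector
  val params: Map[String, Any] = Map(
    "topics" -> "sss",
    "data.dir.depth" -> 0,
    "ready.time.delay" -> "0"
  )

  // same fallback idea as the getters above: coerce when possible, otherwise return the default
  def getInt(key: String, defValue: Int): Int =
    params.get(key).flatMap {
      case s: String => Try(s.toInt).toOption
      case n: Number => Some(n.intValue)
      case _         => None
    }.getOrElse(defValue)

  def main(args: Array[String]): Unit = {
    println(getInt("data.dir.depth", -1))   // 0
    println(getInt("ready.time.delay", -1)) // 0, parsed from the string "0"
    println(getInt("missing.key", 5))       // 5, the default
  }
}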

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala
index 0079d10..fe721d2 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala
@@ -22,8 +22,8 @@ import scala.util.{Failure, Success, Try}
 
 object TimeUtil {
 
-  final val TimeRegex = """([+\-]?\d+)(d|h|m|s|ms)""".r
-  final val PureTimeRegex = """([+\-]?\d+)""".r
+  final val TimeRegex = """^([+\-]?\d+)(d|h|m|s|ms)$""".r
+  final val PureTimeRegex = """^([+\-]?\d+)$""".r
 
   def milliseconds(timeString: String): Option[Long] = {
     val value: Option[Long] = {
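
Note: anchoring TimeRegex and PureTimeRegex with ^ and $ makes the patterns reject strings that merely contain a valid duration. The call sites of milliseconds are not shown in this hunk, so the following is only a standalone sketch of how the anchored and unanchored patterns differ when scanned with findFirstIn:

object RegexAnchorSketch {
  val Unanchored = """([+\-]?\d+)(d|h|m|s|ms)""".r
  val Anchored   = """^([+\-]?\d+)(d|h|m|s|ms)$""".r

  def main(args: Array[String]): Unit = {
    println(Unanchored.findFirstIn("5mabc")) // Some(5m): a prefix of an invalid token is accepted
    println(Unanchored.findFirstIn("5ms"))   // Some(5m): "ms" can be read as minutes plus a stray 's'
    println(Anchored.findFirstIn("5mabc"))   // None: the whole string must be a duration token
    println(Anchored.findFirstIn("5ms"))     // Some(5ms)
  }
}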

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/config-test-accuracy-streaming-multids.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/config-test-accuracy-streaming-multids.json b/measure/src/test/resources/config-test-accuracy-streaming-multids.json
new file mode 100644
index 0000000..18532b0
--- /dev/null
+++ b/measure/src/test/resources/config-test-accuracy-streaming-multids.json
@@ -0,0 +1,144 @@
+{
+  "name": "accu_streaming",
+
+  "process.type": "streaming",
+
+  "data.sources": [
+    {
+      "name": "source",
+      "connectors": [
+        {
+          "type": "kafka",
+          "version": "0.8",
+          "config": {
+            "kafka.config": {
+              "bootstrap.servers": "10.149.247.156:9092",
+              "group.id": "group1",
+              "auto.offset.reset": "smallest",
+              "auto.commit.enable": "false"
+            },
+            "topics": "sss",
+            "key.type": "java.lang.String",
+            "value.type": "java.lang.String"
+          },
+          "pre.proc": [
+            {
+              "dsl.type": "df-opr",
+              "name": "${s1}",
+              "rule": "from_json",
+              "persist.type": "cache",
+              "details": {
+                "df.name": "${this}"
+              }
+            },
+            {
+              "dsl.type": "spark-sql",
+              "name": "${this}",
+              "rule": "select name, age from ${s1}"
+            }
+          ]
+        },
+        {
+          "type": "text-dir",
+          "config": {
+            "dir.path": "hdfs://localhost/griffin/text",
+            "data.dir.depth": 0,
+            "success.file": "_SUCCESS",
+            "done.file": "_DONE"
+          },
+          "pre.proc": [
+            {
+              "dsl.type": "df-opr",
+              "name": "${s1}",
+              "rule": "from_json",
+              "persist.type": "cache",
+              "details": {
+                "df.name": "${this}"
+              }
+            },
+            {
+              "dsl.type": "spark-sql",
+              "name": "${this}",
+              "rule": "select name, age from ${s1}"
+            }
+          ]
+        }
+      ],
+      "cache": {
+        "file.path": "hdfs://localhost/griffin/streaming/dump/source",
+        "info.path": "source",
+        "ready.time.interval": "10s",
+        "ready.time.delay": "0",
+        "time.range": ["-2m", "0"]
+      }
+    }, {
+      "name": "target",
+      "connectors": [
+        {
+          "type": "kafka",
+          "version": "0.8",
+          "config": {
+            "kafka.config": {
+              "bootstrap.servers": "10.149.247.156:9092",
+              "group.id": "group1",
+              "auto.offset.reset": "smallest",
+              "auto.commit.enable": "false"
+            },
+            "topics": "ttt",
+            "key.type": "java.lang.String",
+            "value.type": "java.lang.String"
+          },
+          "pre.proc": [
+            {
+              "dsl.type": "df-opr",
+              "name": "${t1}",
+              "rule": "from_json",
+              "persist.type": "cache",
+              "details": {
+                "df.name": "${this}"
+              }
+            },
+            {
+              "dsl.type": "spark-sql",
+              "name": "${this}",
+              "rule": "select name, age from ${t1}"
+            }
+          ]
+        }
+      ],
+      "cache": {
+        "file.path": "hdfs://localhost/griffin/streaming/dump/target",
+        "info.path": "target",
+        "ready.time.interval": "10s",
+        "ready.time.delay": "0",
+        "time.range": ["-2m", "0"]
+      }
+    }
+  ],
+
+  "evaluateRule": {
+    "rules": [
+      {
+        "dsl.type": "griffin-dsl",
+        "dq.type": "accuracy",
+        "rule": "source.name = target.name and source.age = target.age",
+        "details": {
+          "source": "source",
+          "target": "target",
+          "miss.records": {
+            "name": "miss.records",
+            "persist.type": "record",
+            "update.data.source": "source"
+          },
+          "accuracy": {
+            "name": "accu",
+            "persist.type": "metric"
+          },
+          "miss": "miss_count",
+          "total": "total_count",
+          "matched": "matched_count"
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file
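
Note: the cache block's "time.range" entries ("-2m", "0") are duration strings in the TimeUtil format above; they presumably bound, relative to the calculation time, how far back cached source data is considered. A standalone sketch (not griffin's TimeUtil) of turning such strings into millisecond offsets:

object TimeRangeSketch {
  val TimeRegex     = """^([+\-]?\d+)(d|h|m|s|ms)$""".r
  val PureTimeRegex = """^([+\-]?\d+)$""".r

  def toMs(s: String): Option[Long] = s.trim match {
    case TimeRegex(v, unit) =>
      val n = v.toLong
      unit match {
        case "d"  => Some(n * 24 * 60 * 60 * 1000)
        case "h"  => Some(n * 60 * 60 * 1000)
        case "m"  => Some(n * 60 * 1000)
        case "s"  => Some(n * 1000)
        case "ms" => Some(n)
      }
    case PureTimeRegex(v) => Some(v.toLong)
    case _ => None
  }

  def main(args: Array[String]): Unit = {
    println(toMs("-2m")) // Some(-120000)
    println(toMs("0"))   // Some(0)
    println(toMs("abc")) // None
  }
}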

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/config-test-accuracy-streaming.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/config-test-accuracy-streaming.json b/measure/src/test/resources/config-test-accuracy-streaming.json
new file mode 100644
index 0000000..276f8dd
--- /dev/null
+++ b/measure/src/test/resources/config-test-accuracy-streaming.json
@@ -0,0 +1,119 @@
+{
+  "name": "accu_streaming",
+
+  "process.type": "streaming",
+
+  "data.sources": [
+    {
+      "name": "source",
+      "connectors": [
+        {
+          "type": "kafka",
+          "version": "0.8",
+          "config": {
+            "kafka.config": {
+              "bootstrap.servers": "10.149.247.156:9092",
+              "group.id": "group1",
+              "auto.offset.reset": "smallest",
+              "auto.commit.enable": "false"
+            },
+            "topics": "sss",
+            "key.type": "java.lang.String",
+            "value.type": "java.lang.String"
+          },
+          "pre.proc": [
+            {
+              "dsl.type": "df-opr",
+              "name": "${s1}",
+              "rule": "from_json",
+              "persist.type": "cache",
+              "details": {
+                "df.name": "${this}"
+              }
+            },
+            {
+              "dsl.type": "spark-sql",
+              "name": "${this}",
+              "rule": "select name, age from ${s1}"
+            }
+          ]
+        }
+      ],
+      "cache": {
+        "file.path": "hdfs://localhost/griffin/streaming/dump/source",
+        "info.path": "source",
+        "ready.time.interval": "10s",
+        "ready.time.delay": "0",
+        "time.range": ["-2m", "0"]
+      }
+    }, {
+      "name": "target",
+      "connectors": [
+        {
+          "type": "kafka",
+          "version": "0.8",
+          "config": {
+            "kafka.config": {
+              "bootstrap.servers": "10.149.247.156:9092",
+              "group.id": "group1",
+              "auto.offset.reset": "smallest",
+              "auto.commit.enable": "false"
+            },
+            "topics": "ttt",
+            "key.type": "java.lang.String",
+            "value.type": "java.lang.String"
+          },
+          "pre.proc": [
+            {
+              "dsl.type": "df-opr",
+              "name": "${t1}",
+              "rule": "from_json",
+              "persist.type": "cache",
+              "details": {
+                "df.name": "${this}"
+              }
+            },
+            {
+              "dsl.type": "spark-sql",
+              "name": "${this}",
+              "rule": "select name, age from ${t1}"
+            }
+          ]
+        }
+      ],
+      "cache": {
+        "file.path": "hdfs://localhost/griffin/streaming/dump/target",
+        "info.path": "target",
+        "ready.time.interval": "10s",
+        "ready.time.delay": "0",
+        "time.range": ["-2m", "0"]
+      }
+    }
+  ],
+
+  "evaluateRule": {
+    "rules": [
+      {
+        "dsl.type": "griffin-dsl",
+        "dq.type": "accuracy",
+        "rule": "source.name = target.name and source.age = target.age",
+        "details": {
+          "source": "source",
+          "target": "target",
+          "miss.records": {
+            "name": "miss.records",
+            "persist.type": "record",
+            "update.data.source": "source"
+          },
+          "accuracy": {
+            "name": "accu",
+            "persist.type": "metric"
+          },
+          "miss": "miss_count",
+          "total": "total_count",
+          "matched": "matched_count"
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/config-test-accuracy.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/config-test-accuracy.json b/measure/src/test/resources/config-test-accuracy.json
new file mode 100644
index 0000000..ecbdaaa
--- /dev/null
+++ b/measure/src/test/resources/config-test-accuracy.json
@@ -0,0 +1,56 @@
+{
+  "name": "accu_batch_test",
+
+  "process.type": "batch",
+
+  "data.sources": [
+    {
+      "name": "src",
+      "connectors": [
+        {
+          "type": "avro",
+          "version": "1.7",
+          "config": {
+            "file.name": "src/test/resources/users_info_src.avro"
+          }
+        }
+      ]
+    }, {
+      "name": "tgt",
+      "connectors": [
+        {
+          "type": "avro",
+          "version": "1.7",
+          "config": {
+            "file.name": "src/test/resources/users_info_target.avro"
+          }
+        }
+      ]
+    }
+  ],
+
+  "evaluateRule": {
+    "rules": [
+      {
+        "dsl.type": "griffin-dsl",
+        "dq.type": "accuracy",
+        "rule": "src.user_id = tgt.user_id AND upper(src.first_name) = upper(tgt.first_name) AND src.last_name = tgt.last_name AND src.address = tgt.address AND src.email = tgt.email AND src.phone = tgt.phone AND src.post_code = tgt.post_code",
+        "details": {
+          "source": "src",
+          "target": "tgt",
+          "miss.records": {
+            "name": "miss.records",
+            "persist.type": "record"
+          },
+          "accuracy": {
+            "name": "accu",
+            "persist.type": "metric"
+          },
+          "miss": "miss_count",
+          "total": "total_count",
+          "matched": "matched_count"
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file
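
Note: the details block only names the output fields (miss_count, total_count, matched_count); the spark-sql rules in config-test.json further down in this commit show one concrete expansion of such an accuracy rule. As a rough sketch of the arithmetic those fields imply (the counts below are invented for illustration, and the fraction is just one common way to read the result):

object AccuracyMetricSketch {
  def main(args: Array[String]): Unit = {
    val total = 50L            // total_count: rows in src
    val miss  = 4L             // miss_count: src rows with no matching tgt row under the rule
    val matched = total - miss // matched_count
    val matchedFraction = if (total == 0L) 1.0 else matched.toDouble / total
    println(s"total=$total, miss=$miss, matched=$matched, matchedFraction=$matchedFraction")
  }
}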

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/config-test-profiling-streaming.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/config-test-profiling-streaming.json b/measure/src/test/resources/config-test-profiling-streaming.json
new file mode 100644
index 0000000..b2a74b8
--- /dev/null
+++ b/measure/src/test/resources/config-test-profiling-streaming.json
@@ -0,0 +1,68 @@
+{
+  "name": "prof_streaming",
+
+  "process.type": "streaming",
+
+  "data.sources": [
+    {
+      "name": "source",
+      "connectors": [
+        {
+          "type": "kafka",
+          "version": "0.8",
+          "config": {
+            "kafka.config": {
+              "bootstrap.servers": "10.149.247.156:9092",
+              "group.id": "group1",
+              "auto.offset.reset": "smallest",
+              "auto.commit.enable": "false"
+            },
+            "topics": "sss",
+            "key.type": "java.lang.String",
+            "value.type": "java.lang.String"
+          },
+          "pre.proc": [
+            {
+              "dsl.type": "df-opr",
+              "name": "${s1}",
+              "rule": "from_json",
+              "persist.type": "cache",
+              "details": {
+                "df.name": "${this}"
+              }
+            },
+            {
+              "dsl.type": "spark-sql",
+              "name": "${this}",
+              "rule": "select name, age from ${s1}"
+            }
+          ]
+        }
+      ],
+      "cache": {
+        "file.path": "hdfs://localhost/griffin/streaming/dump/source",
+        "info.path": "source",
+        "ready.time.interval": "10s",
+        "ready.time.delay": "0",
+        "time.range": ["0", "0"]
+      }
+    }
+  ],
+
+  "evaluateRule": {
+    "rules": [
+      {
+        "dsl.type": "griffin-dsl",
+        "dq.type": "profiling",
+        "rule": "source.name.count(), source.age.avg(), source.age.max(), source.age.min() group by source.name",
+        "details": {
+          "source": "source",
+          "profiling": {
+            "name": "prof",
+            "persist.type": "metric"
+          }
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file
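
Note: the griffin-dsl profiling rule above asks for per-name aggregates of the pre-processed source. An illustrative translation into plain Spark SQL (not the SQL the engine actually generates) looks like:

object ProfilingRuleSketch {
  // roughly what "source.name.count(), source.age.avg(), source.age.max(), source.age.min()
  // group by source.name" asks for, over a table registered as "source"
  val profilingSql: String =
    """SELECT name,
      |       COUNT(name) AS name_count,
      |       AVG(age)    AS age_avg,
      |       MAX(age)    AS age_max,
      |       MIN(age)    AS age_min
      |FROM source
      |GROUP BY name""".stripMargin
}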

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/config-test-profiling.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/config-test-profiling.json b/measure/src/test/resources/config-test-profiling.json
new file mode 100644
index 0000000..187e88a
--- /dev/null
+++ b/measure/src/test/resources/config-test-profiling.json
@@ -0,0 +1,37 @@
+{
+  "name": "prof_batch_test",
+
+  "process.type": "batch",
+
+  "data.sources": [
+    {
+      "name": "source",
+      "connectors": [
+        {
+          "type": "avro",
+          "version": "1.7",
+          "config": {
+            "file.name": "src/test/resources/users_info_src.avro"
+          }
+        }
+      ]
+    }
+  ],
+
+  "evaluateRule": {
+    "rules": [
+      {
+        "dsl.type": "griffin-dsl",
+        "dq.type": "profiling",
+        "rule": "user_id as id, user_id.approx_count_distinct() as cnt group by user_id order by cnt desc, id desc limit 3",
+        "details": {
+          "source": "source",
+          "profiling": {
+            "name": "count",
+            "persist.type": "metric"
+          }
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file
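
Note: approx_count_distinct is a Spark SQL aggregate (available under that name in recent Spark versions), so the batch profiling rule above can be read roughly as the following query over a table registered as "source" (an illustrative translation only):

object ApproxCountDistinctSketch {
  val topUserIdsSql: String =
    """SELECT user_id AS id,
      |       APPROX_COUNT_DISTINCT(user_id) AS cnt
      |FROM source
      |GROUP BY user_id
      |ORDER BY cnt DESC, id DESC
      |LIMIT 3""".stripMargin
}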

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/config-test.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/config-test.json b/measure/src/test/resources/config-test.json
new file mode 100644
index 0000000..23eb5ff
--- /dev/null
+++ b/measure/src/test/resources/config-test.json
@@ -0,0 +1,55 @@
+{
+  "name": "accu batch test",
+
+  "process.type": "batch",
+
+  "data.sources": [
+    {
+      "name": "source",
+      "connectors": [
+        {
+          "type": "avro",
+          "version": "1.7",
+          "config": {
+            "file.name": "src/test/resources/users_info_src.avro"
+          }
+        }
+      ]
+    }, {
+      "name": "target",
+      "connectors": [
+        {
+          "type": "avro",
+          "version": "1.7",
+          "config": {
+            "file.name": "src/test/resources/users_info_target.avro"
+          }
+        }
+      ]
+    }
+  ],
+
+  "evaluateRule": {
+    "rules": [
+      {
+        "dsl.type": "spark-sql",
+        "name": "miss.records",
+        "rule": "SELECT source.user_id, source.first_name, source.last_name, source.address, source.email, source.phone, source.post_code FROM source LEFT JOIN target ON coalesce(source.user_id, 'null') = coalesce(target.user_id, 'null') AND coalesce(source.first_name, 'null') = coalesce(target.first_name, 'null') AND coalesce(source.last_name, 'null') = coalesce(target.last_name, 'null') AND coalesce(source.address, 'null') = coalesce(target.address, 'null') AND coalesce(source.email, 'null') = coalesce(target.email, 'null') AND coalesce(source.phone, 'null') = coalesce(target.phone, 'null') AND coalesce(source.post_code, 'null') = coalesce(target.post_code, 'null') WHERE (NOT (source.user_id IS NULL AND source.first_name IS NULL AND source.last_name IS NULL AND source.address IS NULL AND source.email IS NULL AND source.phone IS NULL AND source.post_code IS NULL)) AND (target.user_id IS NULL AND target.first_name IS NULL AND target.last_name IS NULL AND target.address IS NULL AND t
 arget.email IS NULL AND target.phone IS NULL AND target.post_code IS NULL)",
+        "persist.type": "record"
+      }, {
+        "dsl.type": "spark-sql",
+        "name": "miss",
+        "rule": "SELECT COUNT(*) AS `miss` FROM `miss.records`",
+      }, {
+        "dsl.type": "spark-sql",
+        "name": "total",
+        "rule": "SELECT COUNT(*) AS `total` FROM source",
+      }, {
+        "dsl.type": "spark-sql",
+        "name": "accuracy",
+        "rule": "SELECT `total`.`total` AS `total`, `miss`.`miss` AS `miss`, (`total`.`total` - `miss`.`miss`) AS `matched` FROM total JOIN miss",
+        "persist.type": "metric"
+      }
+    ]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/config-test1.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/config-test1.json b/measure/src/test/resources/config-test1.json
new file mode 100644
index 0000000..53a8765
--- /dev/null
+++ b/measure/src/test/resources/config-test1.json
@@ -0,0 +1,96 @@
+{
+  "name": "accu batch test",
+
+  "process.type": "batch",
+
+  "data.sources": [
+    {
+      "name": "source",
+      "connectors": [
+        {
+          "type": "hive",
+          "version": "1.2",
+          "config": {
+            "database": "default",
+            "table.name": "src"
+          }
+        }
+      ]
+    }, {
+      "name": "target",
+      "connectors": [
+        {
+          "type": "hive",
+          "version": "1.2",
+          "config": {
+            "database": "default",
+            "table.name": "tgt"
+          }
+        }
+      ]
+    }
+  ],
+
+  "evaluateRule": {
+    "rules": [
+      {
+        "dsl.type": "df-opr",
+        "name": "source",
+        "rule": "from_json",
+        "details": {
+          "df.name": "source"
+        }
+      },
+      {
+        "dsl.type": "spark-sql",
+        "name": "seeds",
+        "rule": "SELECT explode(seeds) as seed FROM source"
+      },
+      {
+        "dsl.type": "df-opr",
+        "name": "seeds",
+        "rule": "from_json",
+        "details": {
+          "df.name": "seeds",
+          "col.name": "seed"
+        }
+      },
+      {
+        "dsl.type": "spark-sql",
+        "name": "source",
+        "rule": "SELECT url, get_json_object(metadata, '$.tracker.crawlRequestCreateTS') AS ts FROM seeds"
+      },
+      {
+        "dsl.type": "spark-opr",
+        "name": "target",
+        "rule": "from_json(target.value)"
+      },
+      {
+        "dsl.type": "spark-sql",
+        "name": "attrs",
+        "rule": "SELECT groups[0].attrsList AS attrs FROM target"
+      },
+      {
+        "dsl.type": "spark-sql",
+        "name": "target",
+        "rule": "SELECT attrs.values[index_of(attrs.name, 'URL')][0] AS url, get_json_object(attrs.values[index_of(attrs.name, 'CRAWLMETADATA')][0], '$.tracker.crawlRequestCreateTS') AS ts FROM df2"
+      },
+      {
+        "dsl.type": "spark-sql",
+        "name": "miss.record",
+        "rule": "SELECT source.url, source.ts FROM source LEFT JOIN target ON coalesce(source.url, '') = coalesce(target.url, '') AND coalesce(source.ts, '') = coalesce(target.ts, '') WHERE (NOT (source.url IS NULL AND source.ts IS NULL)) AND (target.url IS NULL AND target.ts IS NULL)",
+        "persist.type": "record"
+      }, {
+        "dsl.type": "spark-sql",
+        "name": "miss.count",
+        "rule": "SELECT COUNT(*) AS `miss.count` FROM `miss.record`",
+        "persist.type": "metric"
+      }, {
+        "dsl.type": "spark-sql",
+        "name": "total.count",
+        "rule": "SELECT COUNT(*) AS `total.count` FROM source",
+        "persist.type": "metric"
+      }
+    ]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/config.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/config.json b/measure/src/test/resources/config.json
index 08a6021..0a17474 100644
--- a/measure/src/test/resources/config.json
+++ b/measure/src/test/resources/config.json
@@ -22,6 +22,6 @@
 
   "evaluateRule": {
     "sampleRatio": 1,
-    "rules": "$source.user_id + 5 = $target.user_id + (2 + 3) AND $source.first_name + 12 = $target.first_name + (10 + 2) AND $source.last_name = $target.last_name AND $source.address = $target.address AND $source.email = $target.email AND $source.phone = $target.phone AND $source.post_code = $target.post_code WHEN $source.user_id > 10015"
+    "rules": "$source.user_id = $target.user_id AND $source.first_name = $target.first_name AND $source.last_name = $target.last_name AND $source.address = $target.address AND $source.email = $target.email AND $source.phone = $target.phone AND $source.post_code = $target.post_code"
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/env-streaming.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/env-streaming.json b/measure/src/test/resources/env-streaming.json
index 42b4aa9..a01348f 100644
--- a/measure/src/test/resources/env-streaming.json
+++ b/measure/src/test/resources/env-streaming.json
@@ -5,6 +5,7 @@
     "batch.interval": "2s",
     "process.interval": "10s",
     "config": {
+      "spark.master": "local[*]",
       "spark.task.maxFailures": 5,
       "spark.streaming.kafkaMaxRatePerPartition": 1000,
       "spark.streaming.concurrentJobs": 4,

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/env-test.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/env-test.json b/measure/src/test/resources/env-test.json
new file mode 100644
index 0000000..603fad8
--- /dev/null
+++ b/measure/src/test/resources/env-test.json
@@ -0,0 +1,38 @@
+{
+  "spark": {
+    "log.level": "WARN",
+    "checkpoint.dir": "hdfs:///griffin/batch/cp",
+    "batch.interval": "10s",
+    "process.interval": "10m",
+    "config": {
+      "spark.master": "local[*]"
+    }
+  },
+
+  "persist": [
+    {
+      "type": "log",
+      "config": {
+        "max.log.lines": 100
+      }
+    }
+  ],
+
+  "info.cache": [
+    {
+      "type": "zk",
+      "config": {
+        "hosts": "localhost:2181",
+        "namespace": "griffin/infocache",
+        "lock.path": "lock",
+        "mode": "persist",
+        "init.clear": true,
+        "close.clear": false
+      }
+    }
+  ],
+
+  "cleaner": {
+
+  }
+}
\ No newline at end of file


[02/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/test-data.jsonFile
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/test-data.jsonFile b/measure/src/test/resources/test-data.jsonFile
new file mode 100644
index 0000000..73707f4
--- /dev/null
+++ b/measure/src/test/resources/test-data.jsonFile
@@ -0,0 +1,3 @@
+{ "name": "emily", "age": 5, "map": { "a": 1, "b": 2 }, "list": [ { "c": 1, "d": 2 }, { "c": 3, "d": 4 } ], "t": [1, 2, 3] }
+{ "name": "white", "age": 15, "map": { "a": 11, "b": 12 }, "list": [ { "c": 11, "d": 2 }, { "c": 23, "d": 4 } ], "t": [1, 2, 3] }
+{ "name": "west", "age": 25, "map": { "a": 21, "b": 22 }, "list": [ { "c": 11, "d": 2 }, { "c": 23, "d": 4 } ], "t": [1, 2, 3] }
\ No newline at end of file
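
Note: test-data.jsonFile holds one JSON object per line, so it can be loaded directly as line-delimited JSON. A minimal, self-contained sketch using the Spark 1.x API seen elsewhere in this commit (the app name and master setting here are illustrative, not taken from the project):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object TestDataReadSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("test-data-read")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // each line of the fixture is a standalone JSON object
    val df = sqlContext.read.json("src/test/resources/test-data.jsonFile")
    df.printSchema()                 // name, age, map, list, t
    df.select("name", "age").show()

    sc.stop()
  }
}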

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/test-data0.json
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/test-data0.json b/measure/src/test/resources/test-data0.json
new file mode 100644
index 0000000..406acb8
--- /dev/null
+++ b/measure/src/test/resources/test-data0.json
@@ -0,0 +1,56 @@
+[
+  {
+    "name": "emily",
+    "age": 5,
+    "map": {
+      "a": 1,
+      "b": 2
+    },
+    "list": [
+      {
+        "c": 1,
+        "d": 2
+      },
+      {
+        "c": 3,
+        "d": 4
+      }
+    ]
+  },
+  {
+    "name": "white",
+    "age": 15,
+    "map": {
+      "a": 11,
+      "b": 12
+    },
+    "list": [
+      {
+        "c": 11,
+        "d": 2
+      },
+      {
+        "c": 23,
+        "d": 4
+      }
+    ]
+  },
+  {
+    "name": "west",
+    "age": 25,
+    "map": {
+      "a": 21,
+      "b": 22
+    },
+    "list": [
+      {
+        "c": 11,
+        "d": 2
+      },
+      {
+        "c": 23,
+        "d": 4
+      }
+    ]
+  }
+]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/test-data1.jsonFile
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/test-data1.jsonFile b/measure/src/test/resources/test-data1.jsonFile
new file mode 100644
index 0000000..1e1f28a
--- /dev/null
+++ b/measure/src/test/resources/test-data1.jsonFile
@@ -0,0 +1,31 @@
+[{
+	"Year": "2013",
+	"First Name": "DAVID",
+	"County": "KINGS",
+	"Sex": "M",
+	"Count": "272"
+}, {
+	"Year": "2013",
+	"First Name": "JAYDEN",
+	"County": "KINGS",
+	"Sex": "M",
+	"Count": "268"
+}, {
+	"Year": "2013",
+	"First Name": "JAYDEN",
+	"County": "QUEENS",
+	"Sex": "M",
+	"Count": "219"
+}, {
+	"Year": "2013",
+	"First Name": "MOSHE",
+	"County": "KINGS",
+	"Sex": "M",
+	"Count": "219"
+}, {
+	"Year": "2013",
+	"First Name": "ETHAN",
+	"County": "QUEENS",
+	"Sex": "M",
+	"Count": "216"
+}]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/algo/batch/BatchAccuracyAlgoTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/algo/batch/BatchAccuracyAlgoTest.scala b/measure/src/test/scala/org/apache/griffin/measure/algo/batch/BatchAccuracyAlgoTest.scala
deleted file mode 100644
index 6a60326..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/algo/batch/BatchAccuracyAlgoTest.scala
+++ /dev/null
@@ -1,198 +0,0 @@
-///*
-//Licensed to the Apache Software Foundation (ASF) under one
-//or more contributor license agreements.  See the NOTICE file
-//distributed with this work for additional information
-//regarding copyright ownership.  The ASF licenses this file
-//to you under the Apache License, Version 2.0 (the
-//"License"); you may not use this file except in compliance
-//with the License.  You may obtain a copy of the License at
-//
-//  http://www.apache.org/licenses/LICENSE-2.0
-//
-//Unless required by applicable law or agreed to in writing,
-//software distributed under the License is distributed on an
-//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-//KIND, either express or implied.  See the License for the
-//specific language governing permissions and limitations
-//under the License.
-//*/
-//package org.apache.griffin.measure.algo
-//
-//import java.util.Date
-//
-//import org.apache.griffin.measure.algo.batch.BatchAccuracyAlgo
-//import org.apache.griffin.measure.config.params._
-//import org.apache.griffin.measure.config.params.env._
-//import org.apache.griffin.measure.config.params.user._
-//import org.apache.griffin.measure.config.reader._
-//import org.apache.griffin.measure.config.validator._
-//import org.apache.griffin.measure.connector.direct.DirectDataConnector
-//import org.apache.griffin.measure.connector.{DataConnector, DataConnectorFactory}
-//import org.apache.griffin.measure.log.Loggable
-//import org.apache.griffin.measure.rule.expr._
-//import org.apache.griffin.measure.rule.{ExprValueUtil, RuleAnalyzer, RuleFactory}
-//import org.apache.spark.rdd.RDD
-//import org.apache.spark.sql.SQLContext
-//import org.apache.spark.{SparkConf, SparkContext}
-//import org.junit.runner.RunWith
-//import org.scalatest.junit.JUnitRunner
-//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-//
-//import scala.util.{Failure, Success, Try}
-//
-//
-//@RunWith(classOf[JUnitRunner])
-//class BatchAccuracyAlgoTest extends FunSuite with Matchers with BeforeAndAfter with Loggable {
-//
-//  val envFile = "src/test/resources/env.json"
-//  val confFile = "src/test/resources/config.json"
-////  val confFile = "{\"name\":\"accu1\",\"type\":\"accuracy\",\"source\":{\"type\":\"avro\",\"version\":\"1.7\",\"config\":{\"file.name\":\"src/test/resources/users_info_src.avro\"}},\"target\":{\"type\":\"avro\",\"version\":\"1.7\",\"config\":{\"file.name\":\"src/test/resources/users_info_target.avro\"}},\"evaluateRule\":{\"sampleRatio\":1,\"rules\":\"$source.user_id + 5 = $target.user_id + (2 + 3) AND $source.first_name + 12 = $target.first_name + (10 + 2) AND $source.last_name = $target.last_name AND $source.address = $target.address AND $source.email = $target.email AND $source.phone = $target.phone AND $source.post_code = $target.post_code AND (15 OR true) WHEN true AND $source.user_id > 10020\"}}"
-//  val envFsType = "local"
-//  val userFsType = "local"
-//
-//  val args = Array(envFile, confFile)
-//
-//  var sc: SparkContext = _
-//  var sqlContext: SQLContext = _
-//
-//  var allParam: AllParam = _
-//
-//  before {
-//    // read param files
-//    val envParam = readParamFile[EnvParam](envFile, envFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    val userParam = readParamFile[UserParam](confFile, userFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    allParam = AllParam(envParam, userParam)
-//
-//    // validate param files
-//    validateParams(allParam) match {
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-3)
-//      }
-//      case _ => {
-//        info("params validation pass")
-//      }
-//    }
-//
-//    val metricName = userParam.name
-//    val conf = new SparkConf().setMaster("local[*]").setAppName(metricName)
-//    sc = new SparkContext(conf)
-//    sqlContext = new SQLContext(sc)
-//  }
-//
-//  test("algorithm") {
-//    Try {
-//      val envParam = allParam.envParam
-//      val userParam = allParam.userParam
-//
-//      // start time
-//      val startTime = new Date().getTime()
-//
-//      // get spark application id
-//      val applicationId = sc.applicationId
-//
-//      // rules
-//      val ruleFactory = RuleFactory(userParam.evaluateRuleParam)
-//      val rule: StatementExpr = ruleFactory.generateRule()
-//      val ruleAnalyzer: RuleAnalyzer = RuleAnalyzer(rule)
-//
-//      ruleAnalyzer.constCacheExprs.foreach(println)
-//      ruleAnalyzer.constFinalCacheExprs.foreach(println)
-//
-//      // global cache data
-//      val constExprValueMap = ExprValueUtil.genExprValueMaps(None, ruleAnalyzer.constCacheExprs, Map[String, Any]())
-//      val finalConstExprValueMap = ExprValueUtil.updateExprValueMaps(ruleAnalyzer.constFinalCacheExprs, constExprValueMap)
-//      val finalConstMap = finalConstExprValueMap.headOption match {
-//        case Some(m) => m
-//        case _ => Map[String, Any]()
-//      }
-//
-//      // data connector
-//      val sourceDataConnector: DirectDataConnector =
-//        DataConnectorFactory.getDirectDataConnector(sqlContext, null, userParam.sourceParam,
-//          ruleAnalyzer.sourceRuleExprs, finalConstMap
-//        ) match {
-//          case Success(cntr) => {
-//            if (cntr.available) cntr
-//            else throw new Exception("source data not available!")
-//          }
-//          case Failure(ex) => throw ex
-//        }
-//      val targetDataConnector: DirectDataConnector =
-//        DataConnectorFactory.getDirectDataConnector(sqlContext, null, userParam.targetParam,
-//          ruleAnalyzer.targetRuleExprs, finalConstMap
-//        ) match {
-//          case Success(cntr) => {
-//            if (cntr.available) cntr
-//            else throw new Exception("target data not available!")
-//          }
-//          case Failure(ex) => throw ex
-//        }
-//
-//      // get metadata
-////      val sourceMetaData: Iterable[(String, String)] = sourceDataConnector.metaData() match {
-////        case Success(md) => md
-////        case Failure(ex) => throw ex
-////      }
-////      val targetMetaData: Iterable[(String, String)] = targetDataConnector.metaData() match {
-////        case Success(md) => md
-////        case Failure(ex) => throw ex
-////      }
-//
-//      // get data
-//      val sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))] = sourceDataConnector.data() match {
-//        case Success(dt) => dt
-//        case Failure(ex) => throw ex
-//      }
-//      val targetData: RDD[(Product, (Map[String, Any], Map[String, Any]))] = targetDataConnector.data() match {
-//        case Success(dt) => dt
-//        case Failure(ex) => throw ex
-//      }
-//
-//      // my algo
-//      val algo = BatchAccuracyAlgo(allParam)
-//
-//      // accuracy algorithm
-//      val (accuResult, missingRdd, matchedRdd) = algo.accuracy(sourceData, targetData, ruleAnalyzer)
-//
-//      println(s"match percentage: ${accuResult.matchPercentage}, total count: ${accuResult.total}")
-//
-//      missingRdd.map(rec => algo.record2String(rec, ruleAnalyzer.sourceRuleExprs.persistExprs, ruleAnalyzer.targetRuleExprs.persistExprs)).foreach(println)
-//
-//      // end time
-//      val endTime = new Date().getTime
-//      println(s"using time: ${endTime - startTime} ms")
-//    } match {
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-4)
-//      }
-//      case _ => {
-//        info("calculation finished")
-//      }
-//    }
-//  }
-//
-//  private def readParamFile[T <: Param](file: String, fsType: String)(implicit m : Manifest[T]): Try[T] = {
-//    val paramReader = ParamReaderFactory.getParamReader(file, fsType)
-//    paramReader.readConfig[T]
-//  }
-//
-//  private def validateParams(allParam: AllParam): Try[Boolean] = {
-//    val allParamValidator = AllParamValidator()
-//    allParamValidator.validate(allParam)
-//  }
-//
-//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/algo/batch/BatchProfileAlgoTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/algo/batch/BatchProfileAlgoTest.scala b/measure/src/test/scala/org/apache/griffin/measure/algo/batch/BatchProfileAlgoTest.scala
deleted file mode 100644
index e0f500a..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/algo/batch/BatchProfileAlgoTest.scala
+++ /dev/null
@@ -1,173 +0,0 @@
-///*
-//Licensed to the Apache Software Foundation (ASF) under one
-//or more contributor license agreements.  See the NOTICE file
-//distributed with this work for additional information
-//regarding copyright ownership.  The ASF licenses this file
-//to you under the Apache License, Version 2.0 (the
-//"License"); you may not use this file except in compliance
-//with the License.  You may obtain a copy of the License at
-//
-//  http://www.apache.org/licenses/LICENSE-2.0
-//
-//Unless required by applicable law or agreed to in writing,
-//software distributed under the License is distributed on an
-//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-//KIND, either express or implied.  See the License for the
-//specific language governing permissions and limitations
-//under the License.
-//*/
-//package org.apache.griffin.measure.algo
-//
-//import java.util.Date
-//
-//import org.apache.griffin.measure.algo.batch.BatchProfileAlgo
-//import org.apache.griffin.measure.config.params._
-//import org.apache.griffin.measure.config.params.env._
-//import org.apache.griffin.measure.config.params.user._
-//import org.apache.griffin.measure.config.reader._
-//import org.apache.griffin.measure.config.validator._
-//import org.apache.griffin.measure.connector.direct.DirectDataConnector
-//import org.apache.griffin.measure.connector.{DataConnector, DataConnectorFactory}
-//import org.apache.griffin.measure.log.Loggable
-//import org.apache.griffin.measure.rule.expr._
-//import org.apache.griffin.measure.rule.{ExprValueUtil, RuleAnalyzer, RuleFactory}
-//import org.apache.spark.rdd.RDD
-//import org.apache.spark.sql.SQLContext
-//import org.apache.spark.{SparkConf, SparkContext}
-//import org.junit.runner.RunWith
-//import org.scalatest.junit.JUnitRunner
-//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-//
-//import scala.util.{Failure, Success, Try}
-//
-//
-//@RunWith(classOf[JUnitRunner])
-//class BatchProfileAlgoTest extends FunSuite with Matchers with BeforeAndAfter with Loggable {
-//
-//  val envFile = "src/test/resources/env.json"
-//  val confFile = "src/test/resources/config-profile.json"
-//  val envFsType = "local"
-//  val userFsType = "local"
-//
-//  val args = Array(envFile, confFile)
-//
-//  var sc: SparkContext = _
-//  var sqlContext: SQLContext = _
-//
-//  var allParam: AllParam = _
-//
-//  before {
-//    // read param files
-//    val envParam = readParamFile[EnvParam](envFile, envFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    val userParam = readParamFile[UserParam](confFile, userFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    allParam = AllParam(envParam, userParam)
-//
-//    // validate param files
-//    validateParams(allParam) match {
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-3)
-//      }
-//      case _ => {
-//        info("params validation pass")
-//      }
-//    }
-//
-//    val metricName = userParam.name
-//    val conf = new SparkConf().setMaster("local[*]").setAppName(metricName)
-//    sc = new SparkContext(conf)
-//    sqlContext = new SQLContext(sc)
-//  }
-//
-//  test("algorithm") {
-//    Try {
-//      val envParam = allParam.envParam
-//      val userParam = allParam.userParam
-//
-//      // start time
-//      val startTime = new Date().getTime()
-//
-//      // get spark application id
-//      val applicationId = sc.applicationId
-//
-//      // rules
-//      val ruleFactory = RuleFactory(userParam.evaluateRuleParam)
-//      val rule: StatementExpr = ruleFactory.generateRule()
-//      val ruleAnalyzer: RuleAnalyzer = RuleAnalyzer(rule)
-//
-//      ruleAnalyzer.constCacheExprs.foreach(println)
-//      ruleAnalyzer.constFinalCacheExprs.foreach(println)
-//
-//      // global cache data
-//      val constExprValueMap = ExprValueUtil.genExprValueMaps(None, ruleAnalyzer.constCacheExprs, Map[String, Any]())
-//      val finalConstExprValueMap = ExprValueUtil.updateExprValueMaps(ruleAnalyzer.constFinalCacheExprs, constExprValueMap)
-//      val finalConstMap = finalConstExprValueMap.headOption match {
-//        case Some(m) => m
-//        case _ => Map[String, Any]()
-//      }
-//
-//      // data connector
-//      val sourceDataConnector: DirectDataConnector =
-//        DataConnectorFactory.getDirectDataConnector(sqlContext, null, userParam.sourceParam,
-//          ruleAnalyzer.sourceRuleExprs, finalConstMap
-//        ) match {
-//          case Success(cntr) => {
-//            if (cntr.available) cntr
-//            else throw new Exception("source data not available!")
-//          }
-//          case Failure(ex) => throw ex
-//        }
-//
-//      // get data
-//      val sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))] = sourceDataConnector.data() match {
-//        case Success(dt) => dt
-//        case Failure(ex) => throw ex
-//      }
-//
-//      // my algo
-//      val algo = BatchProfileAlgo(allParam)
-//
-//      // profile algorithm
-//      val (profileResult, missingRdd, matchedRdd) = algo.profile(sourceData, ruleAnalyzer)
-//
-//      println(s"match percentage: ${profileResult.matchPercentage}, match count: ${profileResult.matchCount}, total count: ${profileResult.totalCount}")
-//
-//      matchedRdd.map(rec => algo.record2String(rec, ruleAnalyzer.sourceRuleExprs.persistExprs)).foreach(println)
-//
-//      // end time
-//      val endTime = new Date().getTime
-//      println(s"using time: ${endTime - startTime} ms")
-//    } match {
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-4)
-//      }
-//      case _ => {
-//        info("calculation finished")
-//      }
-//    }
-//  }
-//
-//  private def readParamFile[T <: Param](file: String, fsType: String)(implicit m : Manifest[T]): Try[T] = {
-//    val paramReader = ParamReaderFactory.getParamReader(file, fsType)
-//    paramReader.readConfig[T]
-//  }
-//
-//  private def validateParams(allParam: AllParam): Try[Boolean] = {
-//    val allParamValidator = AllParamValidator()
-//    allParamValidator.validate(allParam)
-//  }
-//
-//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/algo/batch/DataFrameSaveTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/algo/batch/DataFrameSaveTest.scala b/measure/src/test/scala/org/apache/griffin/measure/algo/batch/DataFrameSaveTest.scala
deleted file mode 100644
index a76712f..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/algo/batch/DataFrameSaveTest.scala
+++ /dev/null
@@ -1,172 +0,0 @@
-///*
-//Licensed to the Apache Software Foundation (ASF) under one
-//or more contributor license agreements.  See the NOTICE file
-//distributed with this work for additional information
-//regarding copyright ownership.  The ASF licenses this file
-//to you under the Apache License, Version 2.0 (the
-//"License"); you may not use this file except in compliance
-//with the License.  You may obtain a copy of the License at
-//
-//  http://www.apache.org/licenses/LICENSE-2.0
-//
-//Unless required by applicable law or agreed to in writing,
-//software distributed under the License is distributed on an
-//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-//KIND, either express or implied.  See the License for the
-//specific language governing permissions and limitations
-//under the License.
-//*/
-//package org.apache.griffin.measure.algo.batch
-//
-//import java.util.Date
-//
-//import org.apache.griffin.measure.config.params._
-//import org.apache.griffin.measure.config.params.env._
-//import org.apache.griffin.measure.config.params.user._
-//import org.apache.griffin.measure.config.reader._
-//import org.apache.griffin.measure.config.validator._
-//import org.apache.griffin.measure.connector.DataConnectorFactory
-//import org.apache.griffin.measure.connector.direct.DirectDataConnector
-//import org.apache.griffin.measure.log.Loggable
-//import org.apache.griffin.measure.rule.expr._
-//import org.apache.griffin.measure.rule.{ExprValueUtil, RuleAnalyzer, RuleFactory}
-//import org.apache.spark.rdd.RDD
-//import org.apache.spark.sql.SQLContext
-//import org.apache.spark.{SparkConf, SparkContext}
-//import org.junit.runner.RunWith
-//import org.scalatest.junit.JUnitRunner
-//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-//
-//import scala.util.{Failure, Success, Try}
-//
-//
-//@RunWith(classOf[JUnitRunner])
-//class DataFrameSaveTest extends FunSuite with Matchers with BeforeAndAfter with Loggable {
-//
-//  val envFile = "src/test/resources/env.json"
-//  val confFile = "src/test/resources/config-profile.json"
-//  val envFsType = "local"
-//  val userFsType = "local"
-//
-//  val args = Array(envFile, confFile)
-//
-//  var sc: SparkContext = _
-//  var sqlContext: SQLContext = _
-//
-//  var allParam: AllParam = _
-//
-//  before {
-//    // read param files
-//    val envParam = readParamFile[EnvParam](envFile, envFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    val userParam = readParamFile[UserParam](confFile, userFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    allParam = AllParam(envParam, userParam)
-//
-//    // validate param files
-//    validateParams(allParam) match {
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-3)
-//      }
-//      case _ => {
-//        info("params validation pass")
-//      }
-//    }
-//
-//    val metricName = userParam.name
-//    val conf = new SparkConf().setMaster("local[*]").setAppName(metricName)
-//    sc = new SparkContext(conf)
-//    sqlContext = new SQLContext(sc)
-//  }
-//
-//  test("algorithm") {
-//    Try {
-//      val envParam = allParam.envParam
-//      val userParam = allParam.userParam
-//
-//      // start time
-//      val startTime = new Date().getTime()
-//
-//      // get spark application id
-//      val applicationId = sc.applicationId
-//
-//      // rules
-//      val ruleFactory = RuleFactory(userParam.evaluateRuleParam)
-//      val rule: StatementExpr = ruleFactory.generateRule()
-//      val ruleAnalyzer: RuleAnalyzer = RuleAnalyzer(rule)
-//
-//      ruleAnalyzer.constCacheExprs.foreach(println)
-//      ruleAnalyzer.constFinalCacheExprs.foreach(println)
-//
-//      // global cache data
-//      val constExprValueMap = ExprValueUtil.genExprValueMaps(None, ruleAnalyzer.constCacheExprs, Map[String, Any]())
-//      val finalConstExprValueMap = ExprValueUtil.updateExprValueMaps(ruleAnalyzer.constFinalCacheExprs, constExprValueMap)
-//      val finalConstMap = finalConstExprValueMap.headOption match {
-//        case Some(m) => m
-//        case _ => Map[String, Any]()
-//      }
-//
-//      // data connector
-//      val sourceDataConnector: DirectDataConnector =
-//        DataConnectorFactory.getDirectDataConnector(sqlContext, null, userParam.sourceParam,
-//          ruleAnalyzer.sourceRuleExprs, finalConstMap
-//        ) match {
-//          case Success(cntr) => {
-//            if (cntr.available) cntr
-//            else throw new Exception("source data not available!")
-//          }
-//          case Failure(ex) => throw ex
-//        }
-//
-//      // get data
-//      val sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))] = sourceDataConnector.data() match {
-//        case Success(dt) => dt
-//        case Failure(ex) => throw ex
-//      }
-//
-//      // my algo
-//      val algo = BatchProfileAlgo(allParam)
-//
-//      // profile algorithm
-//      val (profileResult, missingRdd, matchedRdd) = algo.profile(sourceData, ruleAnalyzer)
-//
-//      println(s"match percentage: ${profileResult.matchPercentage}, match count: ${profileResult.matchCount}, total count: ${profileResult.totalCount}")
-//
-//      matchedRdd.map(rec => algo.record2String(rec, ruleAnalyzer.sourceRuleExprs.persistExprs)).foreach(println)
-//
-//      // end time
-//      val endTime = new Date().getTime
-//      println(s"using time: ${endTime - startTime} ms")
-//    } match {
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-4)
-//      }
-//      case _ => {
-//        info("calculation finished")
-//      }
-//    }
-//  }
-//
-//  private def readParamFile[T <: Param](file: String, fsType: String)(implicit m : Manifest[T]): Try[T] = {
-//    val paramReader = ParamReaderFactory.getParamReader(file, fsType)
-//    paramReader.readConfig[T]
-//  }
-//
-//  private def validateParams(allParam: AllParam): Try[Boolean] = {
-//    val allParamValidator = AllParamValidator()
-//    allParamValidator.validate(allParam)
-//  }
-//
-//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/algo/core/AccuracyCoreTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/algo/core/AccuracyCoreTest.scala b/measure/src/test/scala/org/apache/griffin/measure/algo/core/AccuracyCoreTest.scala
deleted file mode 100644
index 2179fba..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/algo/core/AccuracyCoreTest.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.core
-
-import org.apache.griffin.measure.config.params.user.EvaluateRuleParam
-import org.apache.griffin.measure.rule.expr._
-import org.apache.griffin.measure.rule.{RuleAnalyzer, RuleFactory}
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-import org.scalatest.PrivateMethodTester
-
-@RunWith(classOf[JUnitRunner])
-class AccuracyCoreTest extends FunSuite with Matchers with BeforeAndAfter with PrivateMethodTester {
-
-  def findExprId(exprs: Iterable[Expr], desc: String): String = {
-    exprs.find(_.desc == desc) match {
-      case Some(expr) => expr._id
-      case _ => ""
-    }
-  }
-
-  test ("match data success") {
-    val rule = "$source.name = $target.name AND $source.age < $target.age"
-    val evaluateRuleParam = EvaluateRuleParam(1.0, rule)
-    val ruleFactory = RuleFactory(evaluateRuleParam)
-    val statement = ruleFactory.generateRule
-    val ruleAnalyzer = RuleAnalyzer(statement)
-
-    val sourcePersistExprs = ruleAnalyzer.sourceRuleExprs.persistExprs
-    val targetPersistExprs = ruleAnalyzer.targetRuleExprs.persistExprs
-
-    val source = (Map[String, Any](
-      (findExprId(sourcePersistExprs, "$source['name']") -> "jack"),
-      (findExprId(sourcePersistExprs, "$source['age']") -> 26)
-    ), Map[String, Any]())
-    val target = (Map[String, Any](
-      (findExprId(targetPersistExprs, "$target['name']") -> "jack"),
-      (findExprId(targetPersistExprs, "$target['age']") -> 27)
-    ), Map[String, Any]())
-
-    val matchData = PrivateMethod[(Boolean, Map[String, Any])]('matchData)
-    val result = AccuracyCore invokePrivate matchData(source, target, ruleAnalyzer)
-    result._1 should be (true)
-    result._2.size should be (0)
-  }
-
-  test ("match data fail") {
-    val rule = "$source.name = $target.name AND $source.age = $target.age"
-    val evaluateRuleParam = EvaluateRuleParam(1.0, rule)
-    val ruleFactory = RuleFactory(evaluateRuleParam)
-    val statement = ruleFactory.generateRule
-    val ruleAnalyzer = RuleAnalyzer(statement)
-
-    val sourcePersistExprs = ruleAnalyzer.sourceRuleExprs.persistExprs
-    val targetPersistExprs = ruleAnalyzer.targetRuleExprs.persistExprs
-
-    val source = (Map[String, Any](
-      (findExprId(sourcePersistExprs, "$source['name']") -> "jack"),
-      (findExprId(sourcePersistExprs, "$source['age']") -> 26)
-    ), Map[String, Any]())
-    val target = (Map[String, Any](
-      (findExprId(targetPersistExprs, "$target['name']") -> "jack"),
-      (findExprId(targetPersistExprs, "$target['age']") -> 27)
-    ), Map[String, Any]())
-
-    val matchData = PrivateMethod[(Boolean, Map[String, Any])]('matchData)
-    val result = AccuracyCore invokePrivate matchData(source, target, ruleAnalyzer)
-    result._1 should be (false)
-    result._2.size shouldNot be (0)
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/algo/core/ProfileCoreTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/algo/core/ProfileCoreTest.scala b/measure/src/test/scala/org/apache/griffin/measure/algo/core/ProfileCoreTest.scala
deleted file mode 100644
index 087e8e5..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/algo/core/ProfileCoreTest.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.core
-
-import org.apache.griffin.measure.config.params.user.EvaluateRuleParam
-import org.apache.griffin.measure.rule.expr._
-import org.apache.griffin.measure.rule.{RuleAnalyzer, RuleFactory}
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-import org.scalatest.PrivateMethodTester
-
-@RunWith(classOf[JUnitRunner])
-class ProfileCoreTest extends FunSuite with Matchers with BeforeAndAfter with PrivateMethodTester {
-
-  def findExprId(exprs: Iterable[Expr], desc: String): String = {
-    exprs.find(_.desc == desc) match {
-      case Some(expr) => expr._id
-      case _ => ""
-    }
-  }
-
-  test ("match data success") {
-    val rule = "$source.name = 'jack' AND $source.age = null"
-    val evaluateRuleParam = EvaluateRuleParam(1.0, rule)
-    val ruleFactory = RuleFactory(evaluateRuleParam)
-    val statement = ruleFactory.generateRule
-    val ruleAnalyzer = RuleAnalyzer(statement)
-
-    val sourcePersistExprs = ruleAnalyzer.sourceRuleExprs.persistExprs
-
-    val source = (Map[String, Any](
-      (findExprId(sourcePersistExprs, "$source['name']") -> "jack"),
-      (findExprId(sourcePersistExprs, "$source['age']") -> null)
-    ), Map[String, Any]())
-
-    val matchData = PrivateMethod[(Boolean, Map[String, Any])]('matchData)
-    val result = ProfileCore invokePrivate matchData(source, ruleAnalyzer)
-    result._1 should be (true)
-    result._2.size should be (0)
-  }
-
-  test ("match data fail") {
-    val rule = "$source.name = 'jack' AND $source.age != null"
-    val evaluateRuleParam = EvaluateRuleParam(1.0, rule)
-    val ruleFactory = RuleFactory(evaluateRuleParam)
-    val statement = ruleFactory.generateRule
-    val ruleAnalyzer = RuleAnalyzer(statement)
-
-    val sourcePersistExprs = ruleAnalyzer.sourceRuleExprs.persistExprs
-
-    val source = (Map[String, Any](
-      (findExprId(sourcePersistExprs, "$source['name']") -> "jack"),
-      (findExprId(sourcePersistExprs, "$source['age']") -> null)
-    ), Map[String, Any]())
-
-    val matchData = PrivateMethod[(Boolean, Map[String, Any])]('matchData)
-    val result = ProfileCore invokePrivate matchData(source, ruleAnalyzer)
-    result._1 should be (false)
-    result._2.size shouldNot be (0)
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyAlgoTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyAlgoTest.scala b/measure/src/test/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyAlgoTest.scala
deleted file mode 100644
index a22f91f..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyAlgoTest.scala
+++ /dev/null
@@ -1,267 +0,0 @@
-///*
-//Licensed to the Apache Software Foundation (ASF) under one
-//or more contributor license agreements.  See the NOTICE file
-//distributed with this work for additional information
-//regarding copyright ownership.  The ASF licenses this file
-//to you under the Apache License, Version 2.0 (the
-//"License"); you may not use this file except in compliance
-//with the License.  You may obtain a copy of the License at
-//
-//  http://www.apache.org/licenses/LICENSE-2.0
-//
-//Unless required by applicable law or agreed to in writing,
-//software distributed under the License is distributed on an
-//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-//KIND, either express or implied.  See the License for the
-//specific language governing permissions and limitations
-//under the License.
-//*/
-//package org.apache.griffin.measure.algo.streaming
-//
-//import java.util.Date
-//import java.util.concurrent.TimeUnit
-//
-//import org.apache.griffin.measure.algo.batch.BatchAccuracyAlgo
-//import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
-//import org.apache.griffin.measure.cache.result._
-//import org.apache.griffin.measure.config.params._
-//import org.apache.griffin.measure.config.params.env._
-//import org.apache.griffin.measure.config.params.user._
-//import org.apache.griffin.measure.config.reader._
-//import org.apache.griffin.measure.config.validator._
-//import org.apache.griffin.measure.connector.direct.DirectDataConnector
-//import org.apache.griffin.measure.connector.{DataConnector, DataConnectorFactory}
-//import org.apache.griffin.measure.log.Loggable
-//import org.apache.griffin.measure.persist.{Persist, PersistFactory, PersistType}
-//import org.apache.griffin.measure.result._
-//import org.apache.griffin.measure.rule.expr._
-//import org.apache.griffin.measure.rule.{ExprValueUtil, RuleAnalyzer, RuleFactory}
-//import org.apache.griffin.measure.utils.{HdfsUtil, TimeUtil}
-//import org.apache.spark.rdd.RDD
-//import org.apache.spark.sql.SQLContext
-//import org.apache.spark.sql.hive.HiveContext
-//import org.apache.spark.streaming.{Milliseconds, StreamingContext}
-//import org.apache.spark.{SparkConf, SparkContext}
-//import org.junit.runner.RunWith
-//import org.scalatest.junit.JUnitRunner
-//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-//
-//import scala.util.{Failure, Success, Try}
-//
-//
-//@RunWith(classOf[JUnitRunner])
-//class StreamingAccuracyAlgoTest extends FunSuite with Matchers with BeforeAndAfter with Loggable {
-//
-//  val envFile = "src/test/resources/env-streaming.json"
-//  val confFile = "src/test/resources/config-streaming3.json"
-//  val envFsType = "local"
-//  val userFsType = "local"
-//
-//  val args = Array(envFile, confFile)
-//
-//  var sc: SparkContext = _
-//  var sqlContext: SQLContext = _
-////  val ssc: StreamingContext = _
-//
-//  var allParam: AllParam = _
-//
-//  before {
-//    // read param files
-//    val envParam = readParamFile[EnvParam](envFile, envFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    val userParam = readParamFile[UserParam](confFile, userFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    allParam = AllParam(envParam, userParam)
-//
-//    // validate param files
-//    validateParams(allParam) match {
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-3)
-//      }
-//      case _ => {
-//        info("params validation pass")
-//      }
-//    }
-//
-//    val metricName = userParam.name
-//    val sparkParam = envParam.sparkParam
-//    val conf = new SparkConf().setMaster("local[*]").setAppName(metricName)
-//    conf.setAll(sparkParam.config)
-//    sc = new SparkContext(conf)
-//    sc.setLogLevel(envParam.sparkParam.logLevel)
-//    sqlContext = new SQLContext(sc)
-////    sqlContext = new HiveContext(sc)
-//
-////    val a = sqlContext.sql("select * from s1 limit 10")
-////    //    val a = sqlContext.sql("show tables")
-////    a.show(10)
-////
-////    val b = HdfsUtil.existPath("/griffin/streaming")
-////    println(b)
-//  }
-//
-//  test("algorithm") {
-//    val envParam = allParam.envParam
-//    val userParam = allParam.userParam
-//    val metricName = userParam.name
-//    val sparkParam = envParam.sparkParam
-//    val cleanerParam = envParam.cleanerParam
-//
-////    val ssc = StreamingContext.getOrCreate(sparkParam.cpDir,
-////      ( ) => {
-////        try {
-////          val batchInterval = TimeUtil.milliseconds(sparkParam.batchInterval) match {
-////            case Some(interval) => Milliseconds(interval)
-////            case _ => throw new Exception("invalid batch interval")
-////          }
-////          val ssc = new StreamingContext(sc, batchInterval)
-////          ssc.checkpoint(sparkParam.cpDir)
-////          ssc
-////        } catch {
-////          case runtime: RuntimeException => {
-////            throw runtime
-////          }
-////        }
-////      })
-//
-//    val batchInterval = TimeUtil.milliseconds(sparkParam.batchInterval) match {
-//      case Some(interval) => Milliseconds(interval)
-//      case _ => throw new Exception("invalid batch interval")
-//    }
-//    val ssc = new StreamingContext(sc, batchInterval)
-//    ssc.checkpoint(sparkParam.cpDir)
-//
-//    // start time
-//    val startTime = new Date().getTime()
-//
-//    val persistFactory = PersistFactory(envParam.persistParams, metricName)
-//
-//    // get persists to persist measure result
-//    val appPersist: Persist = persistFactory.getPersists(startTime)
-//
-//    // get spark application id
-//    val applicationId = sc.applicationId
-//
-//    // persist start id
-//    appPersist.start(applicationId)
-//
-//    InfoCacheInstance.initInstance(envParam.infoCacheParams, metricName)
-//    InfoCacheInstance.init
-//
-//    // generate rule from rule param, generate rule analyzer
-//    val ruleFactory = RuleFactory(userParam.evaluateRuleParam)
-//    val rule: StatementExpr = ruleFactory.generateRule()
-//    val ruleAnalyzer: RuleAnalyzer = RuleAnalyzer(rule)
-//
-//    // const expr value map
-//    val constExprValueMap = ExprValueUtil.genExprValueMaps(None, ruleAnalyzer.constCacheExprs, Map[String, Any]())
-//    val finalConstExprValueMap = ExprValueUtil.updateExprValueMaps(ruleAnalyzer.constFinalCacheExprs, constExprValueMap)
-//    val finalConstMap = finalConstExprValueMap.headOption match {
-//      case Some(m) => m
-//      case _ => Map[String, Any]()
-//    }
-//
-//    // data connector
-//    val sourceDataConnector: DirectDataConnector =
-//      DataConnectorFactory.getDirectDataConnector(sqlContext, ssc, userParam.sourceParam,
-//        ruleAnalyzer.sourceRuleExprs, finalConstMap
-//      ) match {
-//        case Success(cntr) => {
-//          if (cntr.available) cntr
-//          else throw new Exception("source data connection error!")
-//        }
-//        case Failure(ex) => throw ex
-//      }
-//    val targetDataConnector: DirectDataConnector =
-//      DataConnectorFactory.getDirectDataConnector(sqlContext, ssc, userParam.targetParam,
-//        ruleAnalyzer.targetRuleExprs, finalConstMap
-//      ) match {
-//        case Success(cntr) => {
-//          if (cntr.available) cntr
-//          else throw new Exception("target data connection error!")
-//        }
-//        case Failure(ex) => throw ex
-//      }
-//
-//    val cacheResultProcesser = CacheResultProcesser()
-//
-//    // init data stream
-//    sourceDataConnector.init()
-//    targetDataConnector.init()
-//
-//    // my algo
-//    val algo = StreamingAccuracyAlgo(allParam)
-//
-//    val streamingAccuracyProcess = StreamingAccuracyProcess(
-//      sourceDataConnector, targetDataConnector,
-//      ruleAnalyzer, cacheResultProcesser, persistFactory, appPersist)
-//
-//    val processInterval = TimeUtil.milliseconds(sparkParam.processInterval) match {
-//      case Some(interval) => interval
-//      case _ => throw new Exception("invalid batch interval")
-//    }
-//    val process = TimingProcess(processInterval, streamingAccuracyProcess)
-//
-//    // clean thread
-////    case class Clean() extends Runnable {
-////      val lock = InfoCacheInstance.genLock("clean")
-////      def run(): Unit = {
-////        val locked = lock.lock(5, TimeUnit.SECONDS)
-////        if (locked) {
-////          try {
-////            sourceDataConnector.cleanData
-////            targetDataConnector.cleanData
-////          } finally {
-////            lock.unlock()
-////          }
-////        }
-////      }
-////    }
-////    val cleanInterval = TimeUtil.milliseconds(cleanerParam.cleanInterval) match {
-////      case Some(interval) => interval
-////      case _ => throw new Exception("invalid batch interval")
-////    }
-////    val clean = TimingProcess(cleanInterval, Clean())
-//
-//    process.startup()
-////    clean.startup()
-//
-//    ssc.start()
-//    ssc.awaitTermination()
-//    ssc.stop(stopSparkContext=true, stopGracefully=true)
-//
-//    println("================ end ================")
-//
-//    // context stop
-//    sc.stop
-//
-//    InfoCacheInstance.close
-//
-//    appPersist.finish()
-//
-//    process.shutdown()
-////    clean.shutdown()
-//  }
-//
-//  private def readParamFile[T <: Param](file: String, fsType: String)(implicit m : Manifest[T]): Try[T] = {
-//    val paramReader = ParamReaderFactory.getParamReader(file, fsType)
-//    paramReader.readConfig[T]
-//  }
-//
-//  private def validateParams(allParam: AllParam): Try[Boolean] = {
-//    val allParamValidator = AllParamValidator()
-//    allParamValidator.validate(allParam)
-//  }
-//
-//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/config/reader/ParamRawStringReaderTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/config/reader/ParamRawStringReaderTest.scala b/measure/src/test/scala/org/apache/griffin/measure/config/reader/ParamRawStringReaderTest.scala
index b3c94e5..9e5d380 100644
--- a/measure/src/test/scala/org/apache/griffin/measure/config/reader/ParamRawStringReaderTest.scala
+++ b/measure/src/test/scala/org/apache/griffin/measure/config/reader/ParamRawStringReaderTest.scala
@@ -26,7 +26,7 @@ import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
 @RunWith(classOf[JUnitRunner])
 class ParamRawStringReaderTest extends FunSuite with Matchers with BeforeAndAfter {
 
-  test("read config") {
+  test("read raw config") {
     val rawString = """{"type": "hdfs", "config": {"path": "/path/to", "time": 1234567}}"""
 
     val reader = ParamRawStringReader(rawString)
@@ -34,5 +34,4 @@ class ParamRawStringReaderTest extends FunSuite with Matchers with BeforeAndAfte
     paramTry.isSuccess should be (true)
     paramTry.get should be (PersistParam("hdfs", Map[String, Any](("path" -> "/path/to"), ("time" -> 1234567))))
   }
-
 }
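
For reference, a minimal standalone sketch of the raw-string reader call exercised in the renamed test above. This is only an illustration: the RawStringReaderSketch object name is invented, the import paths are assumed, and readConfig[T] returning a Try[T] follows the helper signature shown later in this commit.

import org.apache.griffin.measure.config.params.env.PersistParam   // assumed package for PersistParam
import org.apache.griffin.measure.config.reader.ParamRawStringReader

import scala.util.{Failure, Success}

object RawStringReaderSketch {
  def main(args: Array[String]): Unit = {
    // the same raw JSON string used in the test above
    val rawString = """{"type": "hdfs", "config": {"path": "/path/to", "time": 1234567}}"""
    val reader = ParamRawStringReader(rawString)
    // readConfig[T] is assumed to return Try[T], as in the readParamFile helpers elsewhere in this commit
    reader.readConfig[PersistParam] match {
      case Success(param) => println(s"parsed persist param: $param")
      case Failure(ex)    => println(s"parse failed: ${ex.getMessage}")
    }
  }
}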

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/connector/ConnectorTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/connector/ConnectorTest.scala b/measure/src/test/scala/org/apache/griffin/measure/connector/ConnectorTest.scala
deleted file mode 100644
index 2139ff7..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/connector/ConnectorTest.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector
-
-import java.util.Date
-import java.util.concurrent.TimeUnit
-
-import kafka.serializer.StringDecoder
-import org.apache.griffin.measure.algo.streaming.TimingProcess
-import org.apache.griffin.measure.cache.info.InfoCacheInstance
-import org.apache.griffin.measure.config.params.env._
-import org.apache.griffin.measure.config.params.user.{DataCacheParam, DataConnectorParam, EvaluateRuleParam}
-import org.apache.griffin.measure.config.reader.ParamRawStringReader
-import org.apache.griffin.measure.result.{DataInfo, TimeStampInfo}
-import org.apache.griffin.measure.rule.expr.{Expr, StatementExpr}
-import org.apache.griffin.measure.rule._
-import org.apache.griffin.measure.utils.{HdfsFileDumpUtil, HdfsUtil, TimeUtil}
-import org.apache.griffin.measure.rule.{DataTypeCalculationUtil, ExprValueUtil, RuleExprs}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.types.{DataType, StructField, StructType}
-import org.apache.spark.sql.{DataFrame, Row, SQLContext}
-import org.apache.spark.streaming.dstream.InputDStream
-import org.apache.spark.streaming.kafka.KafkaUtils
-import org.apache.spark.streaming.{Milliseconds, StreamingContext}
-import org.apache.spark.{SparkConf, SparkContext}
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-
-import scala.reflect.ClassTag
-import scala.util.{Failure, Success, Try}
-
-@RunWith(classOf[JUnitRunner])
-class ConnectorTest extends FunSuite with Matchers with BeforeAndAfter {
-
-  test("read config") {
-
-    val a = "java.lang.String"
-    val at = getClassTag(a)
-    println(at)
-
-    at match {
-      case ClassTag(m) => println(m)
-      case _ => println("no")
-    }
-
-  }
-
-  private def getClassTag(tp: String): ClassTag[_] = {
-    val clazz = Class.forName(tp)
-    ClassTag(clazz)
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/data/connector/ConnectorTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/data/connector/ConnectorTest.scala b/measure/src/test/scala/org/apache/griffin/measure/data/connector/ConnectorTest.scala
new file mode 100644
index 0000000..ead84f7
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/data/connector/ConnectorTest.scala
@@ -0,0 +1,71 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector
+
+import java.util.Date
+import java.util.concurrent.TimeUnit
+
+import kafka.serializer.StringDecoder
+import org.apache.griffin.measure.cache.info.InfoCacheInstance
+import org.apache.griffin.measure.config.params.env._
+import org.apache.griffin.measure.config.params.user.{DataConnectorParam, EvaluateRuleParam}
+import org.apache.griffin.measure.config.reader.ParamRawStringReader
+import org.apache.griffin.measure.data.connector.batch.TextDirBatchDataConnector
+import org.apache.griffin.measure.process.TimingProcess
+import org.apache.griffin.measure.process.engine.DqEngines
+import org.apache.griffin.measure.result.{DataInfo, TimeStampInfo}
+import org.apache.griffin.measure.rule._
+import org.apache.griffin.measure.utils.{HdfsFileDumpUtil, HdfsUtil, TimeUtil}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.sql.types.{DataType, StructField, StructType}
+import org.apache.spark.sql.{DataFrame, Row, SQLContext}
+import org.apache.spark.streaming.dstream.InputDStream
+import org.apache.spark.streaming.kafka.KafkaUtils
+import org.apache.spark.streaming.{Milliseconds, StreamingContext}
+import org.apache.spark.{SparkConf, SparkContext}
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+
+import scala.reflect.ClassTag
+import scala.util.{Failure, Success, Try}
+
+@RunWith(classOf[JUnitRunner])
+class ConnectorTest extends FunSuite with Matchers with BeforeAndAfter {
+
+  test("read config") {
+
+    val a = "java.lang.String"
+    val at = getClassTag(a)
+    println(at)
+
+    at match {
+      case ClassTag(m) => println(m)
+      case _ => println("no")
+    }
+
+  }
+
+  private def getClassTag(tp: String): ClassTag[_] = {
+    val clazz = Class.forName(tp)
+    ClassTag(clazz)
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/process/BatchProcessTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/process/BatchProcessTest.scala b/measure/src/test/scala/org/apache/griffin/measure/process/BatchProcessTest.scala
new file mode 100644
index 0000000..a1e4854
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/process/BatchProcessTest.scala
@@ -0,0 +1,146 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import org.apache.griffin.measure.config.params.env._
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.config.params._
+import org.apache.griffin.measure.config.reader.ParamReaderFactory
+import org.apache.griffin.measure.config.validator.AllParamValidator
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.persist.PersistThreadPool
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+
+import scala.util.{Failure, Success, Try}
+
+@RunWith(classOf[JUnitRunner])
+class BatchProcessTest extends FunSuite with Matchers with BeforeAndAfter with Loggable {
+
+  val envFile = "src/test/resources/env-test.json"
+  val confFile = "src/test/resources/config-test-profiling.json"
+//  val confFile = "src/test/resources/config-test-accuracy.json"
+
+  val envFsType = "local"
+  val userFsType = "local"
+
+  val args = Array(envFile, confFile)
+
+  var allParam: AllParam = _
+
+  before {
+    // read param files
+    val envParam = readParamFile[EnvParam](envFile, envFsType) match {
+      case Success(p) => p
+      case Failure(ex) => {
+        error(ex.getMessage)
+        sys.exit(-2)
+      }
+    }
+    val userParam = readParamFile[UserParam](confFile, userFsType) match {
+      case Success(p) => p
+      case Failure(ex) => {
+        error(ex.getMessage)
+        sys.exit(-2)
+      }
+    }
+    allParam = AllParam(envParam, userParam)
+
+    // validate param files
+    validateParams(allParam) match {
+      case Failure(ex) => {
+        error(ex.getMessage)
+        sys.exit(-3)
+      }
+      case _ => {
+        info("params validation pass")
+      }
+    }
+  }
+
+  test ("batch process") {
+    val procType = ProcessType(allParam.userParam.procType)
+    val proc: DqProcess = procType match {
+      case BatchProcessType => BatchDqProcess(allParam)
+      case StreamingProcessType => StreamingDqProcess(allParam)
+      case _ => {
+        error(s"${procType} is unsupported process type!")
+        sys.exit(-4)
+      }
+    }
+
+    // process init
+    proc.init match {
+      case Success(_) => {
+        info("process init success")
+      }
+      case Failure(ex) => {
+        error(s"process init error: ${ex.getMessage}")
+        shutdown
+        sys.exit(-5)
+      }
+    }
+
+    // process run
+    proc.run match {
+      case Success(_) => {
+        info("process run success")
+      }
+      case Failure(ex) => {
+        error(s"process run error: ${ex.getMessage}")
+
+        if (proc.retriable) {
+          throw ex
+        } else {
+          shutdown
+          sys.exit(-5)
+        }
+      }
+    }
+
+    // process end
+    proc.end match {
+      case Success(_) => {
+        info("process end success")
+      }
+      case Failure(ex) => {
+        error(s"process end error: ${ex.getMessage}")
+        shutdown
+        sys.exit(-5)
+      }
+    }
+
+    shutdown
+  }
+
+  private def readParamFile[T <: Param](file: String, fsType: String)(implicit m : Manifest[T]): Try[T] = {
+    val paramReader = ParamReaderFactory.getParamReader(file, fsType)
+    paramReader.readConfig[T]
+  }
+
+  private def validateParams(allParam: AllParam): Try[Boolean] = {
+    val allParamValidator = AllParamValidator()
+    allParamValidator.validate(allParam)
+  }
+
+  private def shutdown(): Unit = {
+    PersistThreadPool.shutdown
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/process/JsonParseTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/process/JsonParseTest.scala b/measure/src/test/scala/org/apache/griffin/measure/process/JsonParseTest.scala
new file mode 100644
index 0000000..b119d76
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/process/JsonParseTest.scala
@@ -0,0 +1,531 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import org.apache.griffin.measure.config.params._
+import org.apache.griffin.measure.config.params.env._
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.config.reader.ParamReaderFactory
+import org.apache.griffin.measure.config.validator.AllParamValidator
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.persist.PersistThreadPool
+import org.apache.griffin.measure.process.engine.DataFrameOprs
+import org.apache.griffin.measure.utils.{HdfsUtil, JsonUtil}
+import org.apache.hadoop.hive.ql.exec.UDF
+import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.sql._
+import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
+import org.apache.spark.sql.expressions.UserDefinedAggregateFunction
+import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.sql.types._
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+
+import scala.collection.mutable.WrappedArray
+import scala.util.{Failure, Success, Try}
+
+@RunWith(classOf[JUnitRunner])
+class JsonParseTest extends FunSuite with Matchers with BeforeAndAfter with Loggable {
+
+  var sparkContext: SparkContext = _
+  var sqlContext: SQLContext = _
+
+  before {
+    val conf = new SparkConf().setAppName("test json").setMaster("local[*]")
+    sparkContext = new SparkContext(conf)
+    sparkContext.setLogLevel("WARN")
+//    sqlContext = new HiveContext(sparkContext)
+    sqlContext = new SQLContext(sparkContext)
+  }
+
+  test ("json test") {
+    // 0. prepare data
+//    val dt =
+//      """
+//        |{"name": "s1", "age": 12, "items": [1, 2, 3],
+//        |"subs": [{"id": 1, "type": "seed"}, {"id": 2, "type": "frog"}],
+//        |"inner": {"a": 1, "b": 2}, "jstr": "{\"s1\": \"aaa\", \"s2\": 123}"
+//        |}""".stripMargin
+//    val rdd0 = sparkContext.parallelize(Seq(dt)).map(Row(_))
+    val rdd0 = sparkContext.textFile("src/test/resources/input.msg").map(Row(_))
+
+    val vtp = StructField("value", StringType)
+    val df0 = sqlContext.createDataFrame(rdd0, StructType(Array(vtp)))
+    df0.registerTempTable("src")
+
+//    val fromJson2Array = (s: String) => {
+//      JsonUtil.fromJson[Seq[String]](s)
+//    }
+//    sqlContext.udf.register("from_json_to_array", fromJson2Array)
+//
+//    val df2 = sqlContext.sql("SELECT explode(from_json_to_array(get_json_object(value, '$.seeds'))) as value FROM src")
+//    df2.printSchema
+//    df2.show(10)
+//    df2.registerTempTable("df2")
+
+
+
+    // 1. read from json string to extracted json row
+//    val readSql = "SELECT value FROM src"
+//    val df = sqlContext.sql(readSql)
+//    val df = sqlContext.table("src")
+//    val rdd = df.map { row =>
+//      row.getAs[String]("value")
+//    }
+//    val df1 = sqlContext.read.json(rdd)
+//    df1.printSchema
+//    df1.show(10)
+//    df1.registerTempTable("df1")
+    val details = Map[String, Any](("df.name" -> "src"))
+    val df1 = DataFrameOprs.fromJson(sqlContext, details)
+    df1.registerTempTable("df1")
+
+    // 2. extract json array into lines
+//    val rdd2 = df1.flatMap { row =>
+//      row.getAs[WrappedArray[String]]("seeds")
+//    }
+//    val df2 = sqlContext.read.json(rdd2)
+    val df2 = sqlContext.sql("select explode(seeds) as value from df1")
+//    val tdf = sqlContext.sql("select name, age, explode(items) as item from df1")
+//    tdf.registerTempTable("tdf")
+//    val df2 = sqlContext.sql("select struct(name, age, item) as ttt from tdf")
+    df2.printSchema
+    df2.show(10)
+    df2.registerTempTable("df2")
+    println(df2.count)
+
+    val sql1 = "SELECT value FROM df2"
+    val df22 = sqlContext.sql(sql1)
+    val rdd22 = df22.map { row =>
+      row.getAs[String]("value")
+    }
+    import org.apache.spark.sql.functions._
+    val df23 = sqlContext.read.json(rdd22)
+    df23.registerTempTable("df23")
+//    df23.withColumn("par", monotonicallyIncreasingId)
+
+    val df24 = sqlContext.sql("SELECT url, cast(get_json_object(metadata, '$.tracker.crawlRequestCreateTS') as bigint) as ts FROM df23")
+    df24.printSchema
+    df24.show(10)
+    df24.registerTempTable("df24")
+    println(df24.count)
+
+//    val df25 = sqlContext.sql("select ")
+
+//
+//    // 3. extract json string into row
+////    val df3 = sqlContext.sql("select cast(get_json_object(metadata, '$.tracker.crawlRequestCreateTS') as bigint), url from df2")
+//    val df3 = sqlContext.sql("select cast(get_json_object(get_json_object(value, '$.metadata'), '$.tracker.crawlRequestCreateTS') as bigint), get_json_object(value, '$.url') from df2")
+//    df3.printSchema()
+//    df3.show(10)
+//    println(df3.count)
+
+
+
+//    val df5 = sqlContext.sql("select get_json_object(value, '$.subs') as subs from src")
+//    df5.printSchema()
+//    df5.show(10)
+//    df5.registerTempTable("df5")
+//    val rdd5 = df5.map { row =>
+//      row.getAs[String]("subs")
+//    }
+//    val df6 = sqlContext.read.json(rdd5)
+//    df6.printSchema
+//    df6.show(10)
+
+    // 2. extract json string to row
+//    val df2 = sqlContext.sql("select jstr from df1")
+//    val rdd2 = df2.map { row =>
+//      row.getAs[String]("jstr")
+//    }
+//    val df22 = sqlContext.read.json(rdd2)
+//    df22.printSchema
+//    df22.show(100)
+//    df22.registerTempTable("df2")
+//
+//    val df23 = sqlContext.sql("select json_tuple(jstr, 's1', 's2') from df1")
+//    df23.printSchema()
+//    df23.show(100)
+
+    // 3. extract json array into lines ??
+
+    // 3. flatmap from json row to json row
+//    val df3 = sqlContext.sql("select explode(subs) as sub, items from df1")
+//    df3.printSchema()
+//    df3.show(10)
+//    df3.registerTempTable("df3")
+//
+//    val df4 = sqlContext.sql("select explode(items) as item, sub from df3")
+//    df4.printSchema()
+//    df4.show(10)
+
+//    sqlContext.udf.register("length", (s: WrappedArray[_]) => s.length)
+    //
+    //    val df2 = sqlContext.sql("SELECT inner from df1")
+    //    df2.registerTempTable("df2")
+    //    df2.printSchema
+    //    df2.show(100)
+
+//    def children(colname: String, df: DataFrame): Array[DataFrame] = {
+//      val parent = df.schema.fields.filter(_.name == colname).head
+//      println(parent)
+//      val fields: Array[StructField] = parent.dataType match {
+//        case x: StructType => x.fields
+//        case _ => Array.empty[StructField]
+//      }
+//      fields.map(x => col(s"$colname.${x.name}"))
+////      fields.foreach(println)
+//    }
+////
+//    children("inner", df2)
+//
+//    df2.select(children("bar", df): _*).printSchema
+
+//    val df3 = sqlContext.sql("select inline(subs) from df1")
+//    df3.printSchema()
+//    df3.show(100)
+
+//    val rdd2 = df2.flatMap { row =>
+//      row.getAs[GenericRowWithSchema]("inner") :: Nil
+//    }
+//
+//    rdd2.
+
+//    val funcs = sqlContext.sql("show functions")
+//    funcs.printSchema()
+//    funcs.show(1000)
+//
+//    val desc = sqlContext.sql("describe function inline")
+//    desc.printSchema()
+//    desc.show(100)
+
+    //
+
+  }
+
+  test ("json test 2") {
+    val rdd0 = sparkContext.textFile("src/test/resources/output.msg").map(Row(_))
+
+    val vtp = StructField("value", StringType)
+    val df0 = sqlContext.createDataFrame(rdd0, StructType(Array(vtp)))
+    df0.registerTempTable("tgt")
+
+//    val fromJson2StringArray = (s: String) => {
+//      val seq = JsonUtil.fromJson[Seq[Any]](s)
+//      seq.map(i => JsonUtil.toJson(i))
+//    }
+//    sqlContext.udf.register("from_json_to_string_array", fromJson2StringArray)
+//
+//    val df2 = sqlContext.sql("SELECT from_json_to_string_array(get_json_object(value, '$.groups[0].attrsList')) as value FROM tgt")
+//    df2.printSchema()
+//    df2.show(10)
+//    df2.registerTempTable("df2")
+//
+//    val indexOfStringArray = (sa: String, )
+
+
+    // 1. read from json string to extracted json row
+    val readSql = "SELECT value FROM tgt"
+    val df = sqlContext.sql(readSql)
+    val rdd = df.map { row =>
+      row.getAs[String]("value")
+    }
+    val df1 = sqlContext.read.json(rdd)
+    df1.printSchema
+    df1.show(10)
+    df1.registerTempTable("df1")
+
+
+    val df2 = sqlContext.sql("select groups[0].attrsList as attrs from df1")
+    df2.printSchema
+    df2.show(10)
+    df2.registerTempTable("df2")
+    println(df2.count)
+
+    val indexOf = (arr: Seq[String], v: String) => {
+      arr.indexOf(v)
+    }
+    sqlContext.udf.register("index_of", indexOf)
+
+    val df3 = sqlContext.sql("select attrs.values[index_of(attrs.name, 'URL')][0] as url, cast(get_json_object(attrs.values[index_of(attrs.name, 'CRAWLMETADATA')][0], '$.tracker.crawlRequestCreateTS') as bigint) as ts from df2")
+    df3.printSchema()
+    df3.show(10)
+    df3.registerTempTable("df3")
+  }
+
+  test ("testing") {
+    val dt =
+      """
+        |{"name": "age", "age": 12, "items": [1, 2, 3],
+        |"subs": [{"id": 1, "type": "seed"}, {"id": 2, "type": "frog"}],
+        |"inner": {"a": 1, "b": 2}, "jstr": "{\"s1\": \"aaa\", \"s2\": 123}", "b": true
+        |}""".stripMargin
+    val rdd = sparkContext.parallelize(Seq(dt)).map(Row(_))
+    val vtp = StructField("value", StringType)
+    val df = sqlContext.createDataFrame(rdd, StructType(Array(vtp)))
+    df.registerTempTable("df")
+
+    val df1 = sqlContext.read.json(sqlContext.sql("select * from df").map(r => r.getAs[String]("value")))
+    df1.printSchema()
+    df1.show(10)
+    df1.registerTempTable("df1")
+
+    val test = (s: String) => {
+      s.toInt
+    }
+    sqlContext.udf.register("to_int", test)
+
+    val df2 = sqlContext.sql("select (b) as aa, inner.a from df1 where age = to_int(\"12\")")
+    df2.printSchema()
+    df2.show(10)
+  }
+
+  test ("test input only sql") {
+    val rdd0 = sparkContext.textFile("src/test/resources/input.msg").map(Row(_))
+
+    val vtp = StructField("value", StringType)
+    val df0 = sqlContext.createDataFrame(rdd0, StructType(Array(vtp)))
+    df0.registerTempTable("src")
+    df0.show(10)
+
+    // 1. read from json string to extracted json row
+    val df1 = sqlContext.sql("SELECT get_json_object(value, '$.seeds') as seeds FROM src")
+    df1.printSchema
+    df1.show(10)
+    df1.registerTempTable("df1")
+
+    val json2StringArray: (String) => Seq[String] = (s: String) => {
+      val seq = JsonUtil.fromJson[Seq[String]](s)
+//      seq.map(i => JsonUtil.toJson(i))
+      seq
+    }
+    sqlContext.udf.register("json_to_string_array", json2StringArray)
+
+    val df2 = sqlContext.sql("SELECT explode(json_to_string_array(seeds)) as seed FROM df1")
+    df2.printSchema
+    df2.show(10)
+    df2.registerTempTable("df2")
+
+
+    val df3 = sqlContext.sql("SELECT get_json_object(seed, '$.url') as url, cast(get_json_object(get_json_object(seed, '$.metadata'), '$.tracker.crawlRequestCreateTS') as bigint) as ts FROM df2")
+    df3.printSchema
+    df3.show(10)
+  }
+
+  test ("test output only sql") {
+    val rdd0 = sparkContext.textFile("src/test/resources/output.msg").map(Row(_))
+
+    val vtp = StructField("value", StringType)
+    val df0 = sqlContext.createDataFrame(rdd0, StructType(Array(vtp)))
+    df0.registerTempTable("tgt")
+    df0.printSchema()
+    df0.show(10)
+
+    val json2StringArray: (String) => Seq[String] = (s: String) => {
+      JsonUtil.fromJson[Seq[String]](s)
+    }
+    sqlContext.udf.register("json_to_string_array", json2StringArray)
+
+    val json2StringJsonArray: (String) => Seq[String] = (s: String) => {
+      val seq = JsonUtil.fromJson[Seq[Any]](s)
+      seq.map(i => JsonUtil.toJson(i))
+    }
+    sqlContext.udf.register("json_to_string_json_array", json2StringJsonArray)
+
+    val indexOf = (arr: Seq[String], v: String) => {
+      arr.indexOf(v)
+    }
+    sqlContext.udf.register("index_of", indexOf)
+
+    val indexOfField = (arr: Seq[String], k: String, v: String) => {
+      val seq = arr.flatMap { item =>
+        JsonUtil.fromJson[Map[String, Any]](item).get(k)
+      }
+      seq.indexOf(v)
+    }
+    sqlContext.udf.register("index_of_field", indexOfField)
+
+    // 1. read from json string to extracted json row
+    val df1 = sqlContext.sql("SELECT get_json_object(value, '$.groups[0].attrsList') as attrs FROM tgt")
+    df1.printSchema
+    df1.show(10)
+    df1.registerTempTable("df1")
+
+    val df2 = sqlContext.sql("SELECT json_to_string_json_array(attrs) as attrs FROM df1")
+    df2.printSchema()
+    df2.show(10)
+    df2.registerTempTable("df2")
+
+    val df3 = sqlContext.sql("SELECT attrs[index_of_field(attrs, 'name', 'URL')] as attr1, attrs[index_of_field(attrs, 'name', 'CRAWLMETADATA')] as attr2 FROM df2")
+    df3.printSchema()
+    df3.show(10)
+    df3.registerTempTable("df3")
+
+    val df4 = sqlContext.sql("SELECT json_to_string_array(get_json_object(attr1, '$.values'))[0], cast(get_json_object(json_to_string_array(get_json_object(attr2, '$.values'))[0], '$.tracker.crawlRequestCreateTS') as bigint) FROM df3")
+    df4.printSchema()
+    df4.show(10)
+  }
+
+  test ("test from json") {
+    val fromJson2Map = (str: String) => {
+      val a = JsonUtil.fromJson[Map[String, Any]](str)
+      a.mapValues { v =>
+        v match {
+          case t: String => t
+          case _ => JsonUtil.toJson(v)
+        }
+      }
+    }
+    sqlContext.udf.register("from_json_to_map", fromJson2Map)
+
+    val fromJson2Array = (str: String) => {
+      val a = JsonUtil.fromJson[Seq[Any]](str)
+      a.map { v =>
+        v match {
+          case t: String => t
+          case _ => JsonUtil.toJson(v)
+        }
+      }
+    }
+    sqlContext.udf.register("from_json_to_array", fromJson2Array)
+
+    // ========================
+
+    val srdd = sparkContext.textFile("src/test/resources/input.msg").map(Row(_))
+    val svtp = StructField("value", StringType)
+    val sdf0 = sqlContext.createDataFrame(srdd, StructType(Array(svtp)))
+    sdf0.registerTempTable("sdf0")
+    sdf0.show(10)
+
+    // 1. read from json string to extracted json row
+    val sdf1 = sqlContext.sql("SELECT explode(from_json_to_array(get_json_object(value, '$.seeds'))) as seed FROM sdf0")
+    sdf1.printSchema
+    sdf1.show(10)
+    sdf1.registerTempTable("sdf1")
+
+    val sdf2 = sqlContext.sql("SELECT get_json_object(seed, '$.url') as url, cast(get_json_object(get_json_object(seed, '$.metadata'), '$.tracker.crawlRequestCreateTS') as bigint) as ts FROM sdf1")
+    sdf2.printSchema
+    sdf2.show(10)
+
+    // ---------------------------------------
+
+    val trdd = sparkContext.textFile("src/test/resources/output.msg").map(Row(_))
+    val tvtp = StructField("value", StringType)
+    val tdf0 = sqlContext.createDataFrame(trdd, StructType(Array(tvtp)))
+    tdf0.registerTempTable("tdf0")
+    tdf0.printSchema()
+    tdf0.show(10)
+
+//    val json2StringArray: (String) => Seq[String] = (s: String) => {
+//      JsonUtil.fromJson[Seq[String]](s)
+//    }
+//    sqlContext.udf.register("json_to_string_array", json2StringArray)
+//
+//    val json2StringJsonArray: (String) => Seq[String] = (s: String) => {
+//      val seq = JsonUtil.fromJson[Seq[Any]](s)
+//      seq.map(i => JsonUtil.toJson(i))
+//    }
+//    sqlContext.udf.register("json_to_string_json_array", json2StringJsonArray)
+//
+//    val indexOf = (arr: Seq[String], v: String) => {
+//      arr.indexOf(v)
+//    }
+//    sqlContext.udf.register("index_of", indexOf)
+//
+    val indexOfField = (arr: Seq[String], k: String, v: String) => {
+      val seq = arr.flatMap { item =>
+        JsonUtil.fromJson[Map[String, Any]](item).get(k)
+      }
+      seq.indexOf(v)
+    }
+    sqlContext.udf.register("index_of_field", indexOfField)
+
+    // 1. read from json string to extracted json row
+//    val df1 = sqlContext.sql("SELECT get_json_object(value, '$.groups[0].attrsList') as attrs FROM tdf0")
+    val tdf1 = sqlContext.sql("SELECT from_json_to_array(get_json_object(value, '$.groups[0].attrsList')) as attrs FROM tdf0")
+    tdf1.printSchema
+    tdf1.show(10)
+    tdf1.registerTempTable("tdf1")
+
+//    val tdf2 = sqlContext.sql("SELECT attrs[index_of_field(attrs, 'name', 'URL')] as attr1, attrs[index_of_field(attrs, 'name', 'CRAWLMETADATA')] as attr2 FROM tdf1")
+//    tdf2.printSchema()
+//    tdf2.show(10)
+//    tdf2.registerTempTable("tdf2")
+
+    val tdf3 = sqlContext.sql("SELECT from_json_to_array(get_json_object(attrs[index_of_field(attrs, 'name', 'URL')], '$.values'))[0] as url, cast(get_json_object(from_json_to_array(get_json_object(attrs[index_of_field(attrs, 'name', 'CRAWLMETADATA')], '$.values'))[0], '$.tracker.crawlRequestCreateTS') as bigint) as ts FROM tdf1")
+    tdf3.printSchema()
+    tdf3.show(10)
+  }
+
+  test ("sql functions") {
+    val functions = sqlContext.sql("show functions")
+    functions.printSchema()
+    functions.show(10)
+
+    val functionNames = functions.map(_.getString(0)).collect
+    functionNames.foreach(println)
+  }
+
+  test ("test text file read") {
+    val partitionPaths = Seq[String](
+      "hdfs://localhost/griffin/streaming/dump/source/418010/25080625/1504837518000",
+      "hdfs://localhost/griffin/streaming/dump/target/418010/25080625/1504837518000")
+    val df = sqlContext.read.json(partitionPaths: _*)
+    df.printSchema()
+    df.show(10)
+  }
+
+  test ("list paths") {
+    val filePath = "hdfs://localhost/griffin/streaming/dump/source"
+    val partitionRanges = List[(Long, Long)]((0, 0), (-2, 0))
+    val partitionPaths = listPathsBetweenRanges(filePath :: Nil, partitionRanges)
+    println(partitionPaths)
+  }
+
+  private def listPathsBetweenRanges(paths: List[String],
+                                     partitionRanges: List[(Long, Long)]
+                                    ): List[String] = {
+    partitionRanges match {
+      case Nil => paths
+      case head :: tail => {
+        val (lb, ub) = head
+        val curPaths = paths.flatMap { path =>
+          val names = HdfsUtil.listSubPathsByType(path, "dir").toList
+          println(names)
+          names.filter { name =>
+            str2Long(name) match {
+              case Some(t) => (t >= lb) && (t <= ub)
+              case _ => false
+            }
+          }.map(HdfsUtil.getHdfsFilePath(path, _))
+        }
+        listPathsBetweenRanges(curPaths, tail)
+      }
+    }
+  }
+
+  private def str2Long(str: String): Option[Long] = {
+    try {
+      Some(str.toLong)
+    } catch {
+      case e: Throwable => None
+    }
+  }
+}
\ No newline at end of file
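
The core pattern exercised in JsonParseTest above can be condensed: wrap each raw message as a one-column DataFrame, let sqlContext.read.json infer a schema, then drill into nested fields with explode and get_json_object. The sketch below is a minimal illustration with made-up sample data (the example.com URLs and the JsonParseSketch object are hypothetical), using the same Spark 1.6-style API (SQLContext, registerTempTable, read.json on an RDD of strings) as the test.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

object JsonParseSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("json-parse-sketch").setMaster("local[*]"))
    val sqlContext = new SQLContext(sc)

    // hypothetical input: one JSON message per line, each carrying an array of seeds
    val raw = Seq(
      """{"seeds": [{"url": "http://example.com/a", "ts": 1504837518000}]}""",
      """{"seeds": [{"url": "http://example.com/b", "ts": 1504837519000}]}""")
    val rdd = sc.parallelize(raw).map(Row(_))
    val src = sqlContext.createDataFrame(rdd, StructType(Array(StructField("value", StringType))))
    src.registerTempTable("src")

    // option 1: pick a single field straight out of the raw string with a JSON path
    sqlContext.sql("SELECT get_json_object(value, '$.seeds[0].url') AS url FROM src").show()

    // option 2: infer the full schema, then explode the nested array into one row per seed
    val parsed = sqlContext.read.json(src.map(_.getAs[String]("value")))
    parsed.registerTempTable("parsed")
    sqlContext.sql("SELECT seed.url AS url, seed.ts AS ts FROM (SELECT explode(seeds) AS seed FROM parsed) t").show()

    sc.stop()
  }
}

JsonParseTest goes further and registers UDFs (index_of, index_of_field, from_json_to_array) for lookups that the built-in JSON path cannot express, but the overall flow is the same as in this sketch.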

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/process/JsonToStructs.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/process/JsonToStructs.scala b/measure/src/test/scala/org/apache/griffin/measure/process/JsonToStructs.scala
new file mode 100644
index 0000000..394917c
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/process/JsonToStructs.scala
@@ -0,0 +1,85 @@
+package org.apache.griffin.measure.process
+
+import org.apache.griffin.measure.utils.JsonUtil
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
+import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, GenericArrayData}
+import org.apache.spark.sql.execution.datasources.json.JSONOptions
+import org.apache.spark.sql.types._
+import org.apache.spark.unsafe.types.UTF8String
+
+
+case class JsonToStructs(
+//                          schema: DataType,
+//                          options: Map[String, String],
+                          child: Expression)
+  extends UnaryExpression with CodegenFallback with ExpectsInputTypes {
+  override def nullable: Boolean = true
+
+//  def this(schema: DataType, options: Map[String, String], child: Expression) =
+//    this(schema, options, child, None)
+
+  // Used in `FunctionRegistry`
+//  def this(child: Expression, schema: Expression) =
+//  this(
+//    schema = JsonExprUtils.validateSchemaLiteral(schema),
+//    options = Map.empty[String, String],
+//    child = child,
+//    timeZoneId = None)
+//
+//  def this(child: Expression, schema: Expression, options: Expression) =
+//    this(
+//      schema = JsonExprUtils.validateSchemaLiteral(schema),
+//      options = JsonExprUtils.convertToMapData(options),
+//      child = child,
+//      timeZoneId = None)
+//
+//  override def checkInputDataTypes(): TypeCheckResult = schema match {
+//    case _: StructType | ArrayType(_: StructType, _) =>
+//      super.checkInputDataTypes()
+//    case _ => TypeCheckResult.TypeCheckFailure(
+//      s"Input schema ${schema.simpleString} must be a struct or an array of structs.")
+//  }
+
+  override def dataType: DataType = MapType(StringType, StringType)
+
+//  override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
+//    copy(timeZoneId = Option(timeZoneId))
+
+  override def nullSafeEval(json: Any): Any = {
+    if (json.toString.trim.isEmpty) return null
+
+    try {
+      JsonUtil.fromJson[Map[String, Any]](json.toString)
+    } catch {
+      case _: Throwable => null
+    }
+  }
+
+  override def inputTypes: Seq[DataType] = StringType :: Nil
+}
+//
+//object JsonExprUtils {
+//
+//  def validateSchemaLiteral(exp: Expression): StructType = exp match {
+//    case Literal(s, StringType) => CatalystSqlParser.parseTableSchema(s.toString)
+//    case e => throw new AnalysisException(s"Expected a string literal instead of $e")
+//  }
+//
+//  def convertToMapData(exp: Expression): Map[String, String] = exp match {
+//    case m: CreateMap
+//      if m.dataType.acceptsType(MapType(StringType, StringType, valueContainsNull = false)) =>
+//      val arrayMap = m.eval().asInstanceOf[ArrayBasedMapData]
+//      ArrayBasedMapData.toScalaMap(arrayMap).map { case (key, value) =>
+//        key.toString -> value.toString
+//      }
+//    case m: CreateMap =>
+//      throw new AnalysisException(
+//        s"A type of keys and values in map() must be string, but got ${m.dataType}")
+//    case _ =>
+//      throw new AnalysisException("Must use a map() function for options")
+//  }
+//}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/process/StreamingProcessTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/process/StreamingProcessTest.scala b/measure/src/test/scala/org/apache/griffin/measure/process/StreamingProcessTest.scala
new file mode 100644
index 0000000..07b7c5e
--- /dev/null
+++ b/measure/src/test/scala/org/apache/griffin/measure/process/StreamingProcessTest.scala
@@ -0,0 +1,147 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import org.apache.griffin.measure.config.params._
+import org.apache.griffin.measure.config.params.env._
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.config.reader.ParamReaderFactory
+import org.apache.griffin.measure.config.validator.AllParamValidator
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.persist.PersistThreadPool
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+
+import scala.util.{Failure, Success, Try}
+
+@RunWith(classOf[JUnitRunner])
+class StreamingProcessTest extends FunSuite with Matchers with BeforeAndAfter with Loggable {
+
+  val envFile = "src/test/resources/env-streaming.json"
+//  val confFile = "src/test/resources/config-test-accuracy-streaming-multids.json"
+  val confFile = "src/test/resources/config-test-accuracy-streaming.json"
+//  val confFile = "src/test/resources/config-test-profiling-streaming.json"
+
+  val envFsType = "local"
+  val userFsType = "local"
+
+  val args = Array(envFile, confFile)
+
+  var allParam: AllParam = _
+
+  before {
+    // read param files
+    val envParam = readParamFile[EnvParam](envFile, envFsType) match {
+      case Success(p) => p
+      case Failure(ex) => {
+        error(ex.getMessage)
+        sys.exit(-2)
+      }
+    }
+    val userParam = readParamFile[UserParam](confFile, userFsType) match {
+      case Success(p) => p
+      case Failure(ex) => {
+        error(ex.getMessage)
+        sys.exit(-2)
+      }
+    }
+    allParam = AllParam(envParam, userParam)
+
+    // validate param files
+    validateParams(allParam) match {
+      case Failure(ex) => {
+        error(ex.getMessage)
+        sys.exit(-3)
+      }
+      case _ => {
+        info("params validation pass")
+      }
+    }
+  }
+
+  test ("streaming process") {
+    val procType = ProcessType(allParam.userParam.procType)
+    val proc: DqProcess = procType match {
+      case BatchProcessType => BatchDqProcess(allParam)
+      case StreamingProcessType => StreamingDqProcess(allParam)
+      case _ => {
+        error(s"${procType} is unsupported process type!")
+        sys.exit(-4)
+      }
+    }
+
+    // process init
+    proc.init match {
+      case Success(_) => {
+        info("process init success")
+      }
+      case Failure(ex) => {
+        error(s"process init error: ${ex.getMessage}")
+        shutdown
+        sys.exit(-5)
+      }
+    }
+
+    // process run
+    proc.run match {
+      case Success(_) => {
+        info("process run success")
+      }
+      case Failure(ex) => {
+        error(s"process run error: ${ex.getMessage}")
+
+        if (proc.retriable) {
+          throw ex
+        } else {
+          shutdown
+          sys.exit(-5)
+        }
+      }
+    }
+
+    // process end
+    proc.end match {
+      case Success(_) => {
+        info("process end success")
+      }
+      case Failure(ex) => {
+        error(s"process end error: ${ex.getMessage}")
+        shutdown
+        sys.exit(-5)
+      }
+    }
+
+    shutdown
+  }
+
+  private def readParamFile[T <: Param](file: String, fsType: String)(implicit m : Manifest[T]): Try[T] = {
+    val paramReader = ParamReaderFactory.getParamReader(file, fsType)
+    paramReader.readConfig[T]
+  }
+
+  private def validateParams(allParam: AllParam): Try[Boolean] = {
+    val allParamValidator = AllParamValidator()
+    allParamValidator.validate(allParam)
+  }
+
+  private def shutdown(): Unit = {
+    PersistThreadPool.shutdown
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/scala/org/apache/griffin/measure/rule/ExprValueUtilTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/rule/ExprValueUtilTest.scala b/measure/src/test/scala/org/apache/griffin/measure/rule/ExprValueUtilTest.scala
deleted file mode 100644
index dd8d4a0..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/rule/ExprValueUtilTest.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule
-
-import org.apache.griffin.measure.config.params.user.EvaluateRuleParam
-import org.apache.griffin.measure.rule.expr.{Expr, StatementExpr}
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-
-@RunWith(classOf[JUnitRunner])
-class ExprValueUtilTest extends FunSuite with BeforeAndAfter with Matchers {
-
-  test ("rule calculation") {
-    //    val rules = "$source.json().name = 's2' and $source.json().age[*] = 32"
-    //    val rules = "$source.json().items[*] = 202 AND $source.json().age[*] = 32 AND $source.json().df[*].a = 1"
-    val rules = "$source.json().items[*] = 202 AND $source.json().age[*] = 32 AND $source.json().df['a' = 1].b = 4"
-    //    val rules = "$source.json().df[0].a = 1"
-    val ep = EvaluateRuleParam(1, rules)
-
-    val ruleFactory = RuleFactory(ep)
-    val rule: StatementExpr = ruleFactory.generateRule()
-    val ruleAnalyzer: RuleAnalyzer = RuleAnalyzer(rule)
-
-    val ruleExprs = ruleAnalyzer.sourceRuleExprs
-    val constFinalExprValueMap = Map[String, Any]()
-
-    val data = List[String](
-      ("""{"name": "s1", "age": [22, 23], "items": [102, 104, 106], "df": [{"a": 1, "b": 3}, {"a": 2, "b": 4}]}"""),
-      ("""{"name": "s2", "age": [32, 33], "items": [202, 204, 206], "df": [{"a": 1, "b": 4}, {"a": 2, "b": 4}]}"""),
-      ("""{"name": "s3", "age": [42, 43], "items": [302, 304, 306], "df": [{"a": 1, "b": 5}, {"a": 2, "b": 4}]}""")
-    )
-
-    def str(expr: Expr) = {
-      s"${expr._id}: ${expr.desc} [${expr.getClass.getSimpleName}]"
-    }
-    println("====")
-    ruleExprs.finalCacheExprs.foreach { expr =>
-      println(str(expr))
-    }
-    println("====")
-    ruleExprs.cacheExprs.foreach { expr =>
-      println(str(expr))
-    }
-
-    val constExprValueMap = ExprValueUtil.genExprValueMaps(None, ruleAnalyzer.constCacheExprs, Map[String, Any]())
-    val finalConstExprValueMap = ExprValueUtil.updateExprValueMaps(ruleAnalyzer.constFinalCacheExprs, constExprValueMap)
-    val finalConstMap = finalConstExprValueMap.headOption match {
-      case Some(m) => m
-      case _ => Map[String, Any]()
-    }
-    println("====")
-    println(ruleAnalyzer.constCacheExprs)
-    println(ruleAnalyzer.constFinalCacheExprs)
-    println(finalConstMap)
-
-    println("====")
-    val valueMaps = data.flatMap { msg =>
-      val cacheExprValueMaps = ExprValueUtil.genExprValueMaps(Some(msg), ruleExprs.cacheExprs, finalConstMap)
-      val finalExprValueMaps = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMaps)
-
-      finalExprValueMaps
-    }
-
-    valueMaps.foreach(println)
-    println(valueMaps.size)
-
-  }
-
-}



[11/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
Dsl modify

DSL modified in the measure module, with documentation in griffin-doc/dsl-guide.md

Author: Lionel Liu <bh...@163.com>

Closes #123 from bhlx3lyx7/dsl-modify.


Project: http://git-wip-us.apache.org/repos/asf/incubator-griffin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-griffin/commit/4aa6f779
Tree: http://git-wip-us.apache.org/repos/asf/incubator-griffin/tree/4aa6f779
Diff: http://git-wip-us.apache.org/repos/asf/incubator-griffin/diff/4aa6f779

Branch: refs/heads/master
Commit: 4aa6f779969650ee6d1871d342ad7e114b7ed4ce
Parents: ac8351f
Author: Lionel Liu <bh...@163.com>
Authored: Sat Sep 30 16:34:53 2017 +0800
Committer: William Guo <gu...@icloud.com>
Committed: Sat Sep 30 16:34:53 2017 +0800

----------------------------------------------------------------------
 griffin-doc/dsl-guide.md                        |  83 +++
 measure/derby.log                               |  13 +
 measure/src/main/resources/config-old.json      |  31 ++
 measure/src/main/resources/config-sql.json      |  54 ++
 measure/src/main/resources/config.json          |  71 ++-
 .../apache/griffin/measure/Application.scala    | 102 +++-
 .../griffin/measure/algo/AccuracyAlgo.scala     |  24 -
 .../org/apache/griffin/measure/algo/Algo.scala  |  34 --
 .../griffin/measure/algo/MeasureType.scala      |  26 -
 .../griffin/measure/algo/ProcessType.scala      |  26 -
 .../griffin/measure/algo/ProfileAlgo.scala      |  23 -
 .../measure/algo/batch/BatchAccuracyAlgo.scala  | 190 -------
 .../measure/algo/batch/BatchProfileAlgo.scala   | 162 ------
 .../measure/algo/core/AccuracyCore.scala        | 103 ----
 .../griffin/measure/algo/core/ProfileCore.scala |  73 ---
 .../algo/streaming/StreamingAccuracyAlgo.scala  | 358 -------------
 .../streaming/StreamingAccuracyProcess.scala    | 234 --------
 .../measure/algo/streaming/TimingProcess.scala  |  46 --
 .../measure/cache/info/TimeInfoCache.scala      |   2 +-
 .../cache/result/CacheResultProcesser.scala     |   2 +-
 .../config/params/user/DataCacheParam.scala     |  31 --
 .../config/params/user/DataConnectorParam.scala |   6 +-
 .../config/params/user/DataSourceParam.scala    |  31 ++
 .../config/params/user/EvaluateRuleParam.scala  |   4 +-
 .../measure/config/params/user/UserParam.scala  |  10 +-
 .../measure/connector/DataConnector.scala       |  32 --
 .../connector/DataConnectorFactory.scala        | 139 -----
 .../connector/cache/CacheDataConnector.scala    |  33 --
 .../measure/connector/cache/DataCacheable.scala |  86 ---
 .../measure/connector/cache/DataUpdatable.scala |  30 --
 .../cache/HiveCacheDataConnector.scala          | 351 ------------
 .../cache/TextCacheDataConnector.scala          | 311 -----------
 .../direct/AvroDirectDataConnector.scala        | 132 -----
 .../connector/direct/DirectDataConnector.scala  |  34 --
 .../direct/HiveDirectDataConnector.scala        | 158 ------
 .../direct/KafkaCacheDirectDataConnector.scala  | 125 -----
 .../StreamingCacheDirectDataConnector.scala     |  60 ---
 .../streaming/KafkaStreamingDataConnector.scala |  58 --
 .../streaming/StreamingDataConnector.scala      |  34 --
 .../measure/data/connector/DataConnector.scala  | 114 ++++
 .../data/connector/DataConnectorFactory.scala   | 150 ++++++
 .../batch/AvroBatchDataConnector.scala          | 112 ++++
 .../connector/batch/BatchDataConnector.scala    |  35 ++
 .../batch/HiveBatchDataConnector.scala          | 149 ++++++
 .../batch/KafkaCacheDirectDataConnector.scala   | 125 +++++
 .../StreamingCacheDirectDataConnector.scala     |  60 +++
 .../batch/TextDirBatchDataConnector.scala       | 136 +++++
 .../connector/cache/CacheDataConnector.scala    |  33 ++
 .../data/connector/cache/DataCacheable.scala    |  86 +++
 .../data/connector/cache/DataUpdatable.scala    |  30 ++
 .../cache/HiveCacheDataConnector.scala          | 351 ++++++++++++
 .../cache/TextCacheDataConnector.scala          | 311 +++++++++++
 .../streaming/KafkaStreamingDataConnector.scala |  70 +++
 .../KafkaStreamingStringDataConnector.scala     |  65 +++
 .../streaming/StreamingDataConnector.scala      |  43 ++
 .../measure/data/source/DataCacheable.scala     |  76 +++
 .../measure/data/source/DataSource.scala        | 109 ++++
 .../measure/data/source/DataSourceCache.scala   | 347 ++++++++++++
 .../measure/data/source/DataSourceFactory.scala |  80 +++
 .../griffin/measure/persist/HdfsPersist.scala   | 240 ++++++---
 .../griffin/measure/persist/HttpPersist.scala   |  64 ++-
 .../griffin/measure/persist/LoggerPersist.scala | 173 +++---
 .../griffin/measure/persist/MultiPersists.scala |  14 +-
 .../measure/persist/OldHttpPersist.scala        | 174 +++---
 .../griffin/measure/persist/Persist.scala       |  24 +-
 .../measure/persist/PersistFactory.scala        |   4 +-
 .../apache/griffin/measure/process/Algo.scala   |  34 ++
 .../measure/process/BatchDqProcess.scala        | 117 ++++
 .../griffin/measure/process/DqProcess.scala     |  40 ++
 .../griffin/measure/process/ProcessType.scala   |  47 ++
 .../measure/process/StreamingDqProcess.scala    | 157 ++++++
 .../measure/process/StreamingDqThread.scala     | 185 +++++++
 .../griffin/measure/process/TimingProcess.scala |  46 ++
 .../measure/process/check/DataChecker.scala     |  29 +
 .../process/engine/DataFrameOprEngine.scala     | 165 ++++++
 .../measure/process/engine/DqEngine.scala       |  41 ++
 .../process/engine/DqEngineFactory.scala        |  47 ++
 .../measure/process/engine/DqEngines.scala      | 208 ++++++++
 .../measure/process/engine/SparkDqEngine.scala  | 167 ++++++
 .../process/engine/SparkRowFormatter.scala      |  62 +++
 .../measure/process/engine/SparkSqlEngine.scala |  58 ++
 .../griffin/measure/rule/CalculationUtil.scala  | 315 -----------
 .../measure/rule/DataTypeCalculationUtil.scala  | 159 ------
 .../griffin/measure/rule/ExprValueUtil.scala    | 263 ---------
 .../griffin/measure/rule/RuleAnalyzer.scala     |  72 ---
 .../griffin/measure/rule/RuleFactory.scala      |  52 --
 .../griffin/measure/rule/RuleParser.scala       | 244 ---------
 .../measure/rule/SchemaValueCombineUtil.scala   | 187 -------
 .../measure/rule/adaptor/AdaptPhase.scala       |  25 +
 .../rule/adaptor/DataFrameOprAdaptor.scala      |  44 ++
 .../rule/adaptor/GriffinDslAdaptor.scala        | 349 ++++++++++++
 .../measure/rule/adaptor/RuleAdaptor.scala      |  72 +++
 .../measure/rule/adaptor/RuleAdaptorGroup.scala | 105 ++++
 .../measure/rule/adaptor/SparkSqlAdaptor.scala  |  54 ++
 .../griffin/measure/rule/dsl/DqType.scala       |  58 ++
 .../griffin/measure/rule/dsl/DslType.scala      |  58 ++
 .../griffin/measure/rule/dsl/PersistType.scala  |  58 ++
 .../rule/dsl/analyzer/AccuracyAnalyzer.scala    |  41 ++
 .../rule/dsl/analyzer/BasicAnalyzer.scala       |  53 ++
 .../rule/dsl/analyzer/ProfilingAnalyzer.scala   |  52 ++
 .../measure/rule/dsl/expr/AliasableExpr.scala   |  25 +
 .../rule/dsl/expr/ClauseExpression.scala        | 150 ++++++
 .../griffin/measure/rule/dsl/expr/Expr.scala    |  29 +
 .../measure/rule/dsl/expr/FunctionExpr.scala    |  29 +
 .../measure/rule/dsl/expr/LiteralExpr.scala     |  72 +++
 .../measure/rule/dsl/expr/LogicalExpr.scala     | 170 ++++++
 .../measure/rule/dsl/expr/MathExpr.scala        |  80 +++
 .../measure/rule/dsl/expr/SelectExpr.scala      | 115 ++++
 .../measure/rule/dsl/expr/TreeNode.scala        |  45 ++
 .../measure/rule/dsl/parser/BasicParser.scala   | 337 ++++++++++++
 .../rule/dsl/parser/GriffinDslParser.scala      |  50 ++
 .../measure/rule/expr/AnalyzableExpr.scala      |  24 -
 .../griffin/measure/rule/expr/Cacheable.scala   |  33 --
 .../measure/rule/expr/Calculatable.scala        |  25 -
 .../griffin/measure/rule/expr/ClauseExpr.scala  | 109 ----
 .../measure/rule/expr/DataSourceable.scala      |  28 -
 .../griffin/measure/rule/expr/Describable.scala |  33 --
 .../apache/griffin/measure/rule/expr/Expr.scala |  53 --
 .../measure/rule/expr/ExprDescOnly.scala        |  40 --
 .../measure/rule/expr/ExprIdCounter.scala       |  60 ---
 .../measure/rule/expr/FieldDescOnly.scala       |  58 --
 .../griffin/measure/rule/expr/LiteralExpr.scala |  83 ---
 .../griffin/measure/rule/expr/LogicalExpr.scala | 178 -------
 .../griffin/measure/rule/expr/MathExpr.scala    |  99 ----
 .../griffin/measure/rule/expr/SelectExpr.scala  |  88 ---
 .../rule/func/DefaultFunctionDefine.scala       |  36 --
 .../measure/rule/func/FunctionDefine.scala      |  25 -
 .../measure/rule/func/FunctionUtil.scala        |  75 ---
 .../rule/preproc/PreProcRuleGenerator.scala     |  72 +++
 .../measure/rule/step/ConcreteRuleStep.scala    |  37 ++
 .../griffin/measure/rule/step/DfOprStep.scala   |  29 +
 .../measure/rule/step/GriffinDslStep.scala      |  28 +
 .../griffin/measure/rule/step/RuleStep.scala    |  31 ++
 .../measure/rule/step/SparkSqlStep.scala        |  30 ++
 .../griffin/measure/rule/udf/GriffinUdfs.scala  |  33 ++
 .../measure/utils/HdfsFileDumpUtil.scala        |   2 +-
 .../apache/griffin/measure/utils/HdfsUtil.scala |  71 ++-
 .../griffin/measure/utils/ParamUtil.scala       | 164 ++++++
 .../apache/griffin/measure/utils/TimeUtil.scala |   4 +-
 .../config-test-accuracy-streaming-multids.json | 144 +++++
 .../config-test-accuracy-streaming.json         | 119 +++++
 .../test/resources/config-test-accuracy.json    |  56 ++
 .../config-test-profiling-streaming.json        |  68 +++
 .../test/resources/config-test-profiling.json   |  37 ++
 measure/src/test/resources/config-test.json     |  55 ++
 measure/src/test/resources/config-test1.json    |  96 ++++
 measure/src/test/resources/config.json          |   2 +-
 measure/src/test/resources/env-streaming.json   |   1 +
 measure/src/test/resources/env-test.json        |  38 ++
 measure/src/test/resources/input.msg            |   1 +
 measure/src/test/resources/output.msg           |   1 +
 measure/src/test/resources/test-data.jsonFile   |   3 +
 measure/src/test/resources/test-data0.json      |  56 ++
 measure/src/test/resources/test-data1.jsonFile  |  31 ++
 .../algo/batch/BatchAccuracyAlgoTest.scala      | 198 -------
 .../algo/batch/BatchProfileAlgoTest.scala       | 173 ------
 .../measure/algo/batch/DataFrameSaveTest.scala  | 172 ------
 .../measure/algo/core/AccuracyCoreTest.scala    |  89 ----
 .../measure/algo/core/ProfileCoreTest.scala     |  79 ---
 .../streaming/StreamingAccuracyAlgoTest.scala   | 267 ----------
 .../reader/ParamRawStringReaderTest.scala       |   3 +-
 .../measure/connector/ConnectorTest.scala       |  70 ---
 .../measure/data/connector/ConnectorTest.scala  |  71 +++
 .../measure/process/BatchProcessTest.scala      | 146 +++++
 .../griffin/measure/process/JsonParseTest.scala | 531 +++++++++++++++++++
 .../griffin/measure/process/JsonToStructs.scala |  85 +++
 .../measure/process/StreamingProcessTest.scala  | 147 +++++
 .../measure/rule/ExprValueUtilTest.scala        |  86 ---
 .../griffin/measure/rule/RuleAnalyzerTest.scala |  60 ---
 .../griffin/measure/rule/RuleFactoryTest.scala  |  44 --
 .../griffin/measure/rule/RuleParserTest.scala   | 213 --------
 .../rule/adaptor/GriffinDslAdaptorTest.scala    |  65 +++
 .../rule/dsl/parser/BasicParserTest.scala       | 205 +++++++
 .../apache/griffin/measure/sql/SqlTest.scala    | 125 +++++
 .../griffin/measure/utils/HdfsUtilTest.scala    | 132 +++++
 .../griffin/measure/utils/JsonUtilTest.scala    | 120 ++---
 .../griffin/measure/utils/ParamUtilTest.scala   |  50 ++
 177 files changed, 9536 insertions(+), 7114 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/griffin-doc/dsl-guide.md
----------------------------------------------------------------------
diff --git a/griffin-doc/dsl-guide.md b/griffin-doc/dsl-guide.md
new file mode 100644
index 0000000..6a7b3f8
--- /dev/null
+++ b/griffin-doc/dsl-guide.md
@@ -0,0 +1,83 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+# Apache Griffin DSL Guide
+Griffin DSL is a SQL-like language designed for DQ measurement, aiming to describe data quality requirements in the DQ domain.
+
+## Griffin DSL Syntax Description
+Griffin DSL is SQL-like, case insensitive, and easy to learn.
+
+### Supported operations
+- logical operations: not, and, or, in, between, like, is null, is nan, =, !=, <=, >=, <, >
+- mathematical operations: +, -, *, /, %
+- SQL clauses: as, where, group by, having, order by, limit
+
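+For illustration only (the fields below are hypothetical), one rule may combine these operations:  
+	e.g. `source.age + 1 > 20 AND source.name like "%abc%" AND source.country in ("USA", "CHN")`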
+
+### Keywords
+- `null, nan, true, false`
+- `not, and, or`
+- `in, between, like, is`
+- `as, where, group, by, having, order, desc, asc, limit`
+
+### Operators
+- `!, &&, ||, =, !=, <, >, <=, >=, <>`
+- `+, -, *, /, %`
+- `(, )`
+- `., [, ]`
+
+### Literals
+- **string**: any string wrapped in a pair of " or ', with the escape character \ where needed.  
+	e.g. `"test", 'string 1', "hello \" world \" "`
+- **number**: double or integer number.  
+	e.g. `123, 33.5`
+- **time**: an integer followed by a time unit, translated into an integer number of milliseconds.  
+	e.g. `3d, 5h, 4ms`
+- **boolean**: boolean value directly.  
+	e.g. `true, false`
+
+### Selections
+- **selection head**: data source name.  
+	e.g. `source, target`
+- **all field selection**: *, optionally preceded by a data source name.  
+	e.g. `*, source.*, target.*`
+- **field selection**: a field name, optionally preceded by a data source name.  
+	e.g. `source.age, target.name, user_id`
+- **index selection**: an integer in square brackets "[]" following a field name.  
+	e.g. `source.attributes[3]`
+- **function selection**: a function name with parentheses "()", optionally preceded by a field name.  
+	e.g. `count(*), *.count(), source.user_id.count(), max(source.age)`
+- **alias**: declare an alias after a selection.  
+	e.g. `source.user_id as id, target.user_name as name`
+
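+Several selections may appear together in one rule; for illustration (fields are hypothetical):  
+	e.g. `source.user_id as id, source.attributes[0], count(source.user_id)`
+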
+### Math expressions
+- **math factor**: a literal, a function, a selection, or a math expression in brackets.  
+	e.g. `123, max(1, 2, 3, 4), source.age, (source.age + 13)`
+- **unary math expression**: a unary math operator applied to a factor.  
+	e.g. `-(100 - source.score)`
+- **binary math expression**: math factors combined with binary math operators.  
+	e.g. `source.age + 13, score * 2 + ratio`
+
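+Putting these together, an illustrative math expression (with hypothetical fields) could be:  
+	e.g. `-(100 - source.score) * 2 + max(source.age, 18)`
+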
+### Logical expression
+- **in**: in clause, as in SQL.  
+	e.g. `source.country in ("USA", "CHN", "RSA")`
+- **between**: between clause, as in SQL.  
+	e.g. `source.age between 3 and 30, source.age between (3, 30)`
+- **like**: like clause, as in SQL.  
+	e.g. `source.name like "%abc%"`
+- **logical factor**: 
+

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/derby.log
----------------------------------------------------------------------
diff --git a/measure/derby.log b/measure/derby.log
new file mode 100644
index 0000000..4b93055
--- /dev/null
+++ b/measure/derby.log
@@ -0,0 +1,13 @@
+----------------------------------------------------------------
+Fri Sep 29 15:53:18 CST 2017:
+Booting Derby version The Apache Software Foundation - Apache Derby - 10.10.2.0 - (1582446): instance a816c00e-015e-cca0-1a8b-00000f890648 
+on database directory /private/var/folders/p0/462y3wrn4lv1fptxx5bwy7b839572r/T/spark-890ab6e2-ee56-4d73-8c6a-0dcce204322e/metastore with class loader sun.misc.Launcher$AppClassLoader@18b4aac2 
+Loaded from file:/Users/lliu13/.m2/repository/org/apache/derby/derby/10.10.2.0/derby-10.10.2.0.jar
+java.vendor=Oracle Corporation
+java.runtime.version=1.8.0_101-b13
+user.dir=/Users/lliu13/git/incubator-griffin/measure
+os.name=Mac OS X
+os.arch=x86_64
+os.version=10.12.6
+derby.system.home=null
+Database Class Loader started - derby.database.classpath=''

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/resources/config-old.json
----------------------------------------------------------------------
diff --git a/measure/src/main/resources/config-old.json b/measure/src/main/resources/config-old.json
new file mode 100644
index 0000000..ab32b75
--- /dev/null
+++ b/measure/src/main/resources/config-old.json
@@ -0,0 +1,31 @@
+{
+  "name": "accu1",
+  "type": "accuracy",
+
+  "process.type": "batch",
+
+  "source": {
+    "type": "hive",
+    "version": "1.2",
+    "config": {
+      "database": "default",
+      "table.name": "users_info_src",
+      "partitions": "dt=23123, hour=432; dt=35464, hour=4657"
+    }
+  },
+
+  "target": {
+    "type": "hive",
+    "version": "1.2",
+    "config": {
+      "database": "default",
+      "table.name": "users_info_target",
+      "partitions": "dt=23123, hour=432; dt=35464, hour=4657"
+    }
+  },
+
+  "evaluateRule": {
+    "sampleRatio": 0.2,
+    "rules": "$source.user_id = $target.user_id AND $source.first_name = $target.first_name AND $source.last_name = $target.last_name AND $source.address = $target.address AND $source.email = $target.email AND $source.phone = $target.phone AND $source.post_code = $target.post_code"
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/resources/config-sql.json
----------------------------------------------------------------------
diff --git a/measure/src/main/resources/config-sql.json b/measure/src/main/resources/config-sql.json
new file mode 100644
index 0000000..aad9584
--- /dev/null
+++ b/measure/src/main/resources/config-sql.json
@@ -0,0 +1,54 @@
+{
+  "name": "accu1",
+
+  "process.type": "batch",
+
+  "data.sources": [
+    {
+      "name": "source",
+      "connectors": [
+        {
+          "type": "hive",
+          "version": "1.2",
+          "config": {
+            "database": "default",
+            "table.name": "users_info_src",
+            "partitions": "dt=23123, hour=432; dt=35464, hour=4657"
+          }
+        }
+      ]
+    }, {
+      "name": "target",
+      "connectors": [
+        {
+          "type": "hive",
+          "version": "1.2",
+          "config": {
+            "database": "default",
+            "table.name": "users_info_target",
+            "partitions": "dt=23123, hour=432; dt=35464, hour=4657"
+          }
+        }
+      ]
+    }
+  ],
+
+  "evaluateRule": {
+    "dsl.type": "spark-sql",
+    "rules": [
+      {
+        "name": "miss.record",
+        "rule": "SELECT source.name FROM source LEFT JOIN target ON coalesce(source.name, 'null') = coalesce(target.name, 'null') WHERE (NOT (source.name IS NULL)) AND (target.name IS NULL)",
+        "persist.type": "record"
+      }, {
+        "name": "miss.count",
+        "rule": "SELECT COUNT(*) FROM miss",
+        "persist.type": "metric"
+      }, {
+        "name": "total.count",
+        "rule": "SELECT COUNT(*) FROM source",
+        "persist.type": "metric"
+      }
+    ]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/resources/config.json
----------------------------------------------------------------------
diff --git a/measure/src/main/resources/config.json b/measure/src/main/resources/config.json
index ab32b75..b6e5af9 100644
--- a/measure/src/main/resources/config.json
+++ b/measure/src/main/resources/config.json
@@ -1,31 +1,60 @@
 {
   "name": "accu1",
-  "type": "accuracy",
 
   "process.type": "batch",
 
-  "source": {
-    "type": "hive",
-    "version": "1.2",
-    "config": {
-      "database": "default",
-      "table.name": "users_info_src",
-      "partitions": "dt=23123, hour=432; dt=35464, hour=4657"
+  "data.sources": [
+    {
+      "name": "source",
+      "connectors": [
+        {
+          "type": "hive",
+          "version": "1.2",
+          "config": {
+            "database": "default",
+            "table.name": "users_info_src",
+            "partitions": "dt=23123, hour=432; dt=35464, hour=4657"
+          }
+        }
+      ]
+    }, {
+      "name": "target",
+      "connectors": [
+        {
+          "type": "hive",
+          "version": "1.2",
+          "config": {
+            "database": "default",
+            "table.name": "users_info_target",
+            "partitions": "dt=23123, hour=432; dt=35464, hour=4657"
+          }
+        }
+      ]
     }
-  },
-
-  "target": {
-    "type": "hive",
-    "version": "1.2",
-    "config": {
-      "database": "default",
-      "table.name": "users_info_target",
-      "partitions": "dt=23123, hour=432; dt=35464, hour=4657"
-    }
-  },
+  ],
 
   "evaluateRule": {
-    "sampleRatio": 0.2,
-    "rules": "$source.user_id = $target.user_id AND $source.first_name = $target.first_name AND $source.last_name = $target.last_name AND $source.address = $target.address AND $source.email = $target.email AND $source.phone = $target.phone AND $source.post_code = $target.post_code"
+    "rules": [
+      {
+        "dsl.type": "griffin-dsl",
+        "dq.type": "accuracy",
+        "rule": "source.user_id = target.user_id AND source.first_name = target.first_name AND source.last_name = target.last_name AND source.address = target.address AND source.email = target.email AND source.phone = target.phone AND source.post_code = target.post_code",
+        "details": {
+          "source": "source",
+          "miss.record": {
+            "name": "miss.record",
+            "persist.type": "record"
+          },
+          "miss.count": {
+            "name": "miss.count",
+            "persist.type": "metric"
+          },
+          "total.count": {
+            "name": "total.count",
+            "persist.type": "metric"
+          }
+        }
+      }
+    ]
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/Application.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/Application.scala b/measure/src/main/scala/org/apache/griffin/measure/Application.scala
index af8c830..edbb552 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/Application.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/Application.scala
@@ -18,9 +18,6 @@ under the License.
 */
 package org.apache.griffin.measure
 
-import org.apache.griffin.measure.algo._
-import org.apache.griffin.measure.algo.batch._
-import org.apache.griffin.measure.algo.streaming._
 import org.apache.griffin.measure.config.params._
 import org.apache.griffin.measure.config.params.env._
 import org.apache.griffin.measure.config.params.user._
@@ -28,6 +25,7 @@ import org.apache.griffin.measure.config.reader._
 import org.apache.griffin.measure.config.validator.AllParamValidator
 import org.apache.griffin.measure.log.Loggable
 import org.apache.griffin.measure.persist.PersistThreadPool
+import org.apache.griffin.measure.process._
 
 import scala.util.{Failure, Success, Try}
 
@@ -81,39 +79,91 @@ object Application extends Loggable {
     }
 
     // choose algorithm
-    val dqType = allParam.userParam.dqType
-    val procType = allParam.userParam.procType
-    val algo: Algo = (dqType, procType) match {
-      case (MeasureType.accuracy(), ProcessType.batch()) => BatchAccuracyAlgo(allParam)
-      case (MeasureType.profile(), ProcessType.batch()) => BatchProfileAlgo(allParam)
-      case (MeasureType.accuracy(), ProcessType.streaming()) => StreamingAccuracyAlgo(allParam)
-//      case (MeasureType.profile(), ProcessType.streaming()) => StreamingProfileAlgo(allParam)
+//    val dqType = allParam.userParam.dqType
+    val procType = ProcessType(allParam.userParam.procType)
+    val proc: DqProcess = procType match {
+      case BatchProcessType => BatchDqProcess(allParam)
+      case StreamingProcessType => StreamingDqProcess(allParam)
       case _ => {
-        error(s"${dqType} with ${procType} is unsupported dq type!")
+        error(s"${procType} is unsupported process type!")
         sys.exit(-4)
       }
     }
 
-    // algorithm run
-    algo.run match {
+    // process init
+    proc.init match {
+      case Success(_) => {
+        info("process init success")
+      }
       case Failure(ex) => {
-        error(s"app error: ${ex.getMessage}")
-
-        procType match {
-          case ProcessType.streaming() => {
-            // streaming need to attempt more times by spark streaming itself
-            throw ex
-          }
-          case _ => {
-            shutdown
-            sys.exit(-5)
-          }
+        error(s"process init error: ${ex.getMessage}")
+        shutdown
+        sys.exit(-5)
+      }
+    }
+
+    // process run
+    proc.run match {
+      case Success(_) => {
+        info("process run success")
+      }
+      case Failure(ex) => {
+        error(s"process run error: ${ex.getMessage}")
+
+        if (proc.retriable) {
+          throw ex
+        } else {
+          shutdown
+          sys.exit(-5)
         }
       }
-      case _ => {
-        info("app finished and success")
+    }
+
+    // process end
+    proc.end match {
+      case Success(_) => {
+        info("process end success")
+      }
+      case Failure(ex) => {
+        error(s"process end error: ${ex.getMessage}")
+        shutdown
+        sys.exit(-5)
       }
     }
+
+    shutdown
+
+//    val algo: Algo = (dqType, procType) match {
+//      case (MeasureType.accuracy(), ProcessType.batch()) => BatchAccuracyAlgo(allParam)
+//      case (MeasureType.profile(), ProcessType.batch()) => BatchProfileAlgo(allParam)
+//      case (MeasureType.accuracy(), ProcessType.streaming()) => StreamingAccuracyAlgo(allParam)
+////      case (MeasureType.profile(), ProcessType.streaming()) => StreamingProfileAlgo(allParam)
+//      case _ => {
+//        error(s"${dqType} with ${procType} is unsupported dq type!")
+//        sys.exit(-4)
+//      }
+//    }
+
+    // algorithm run
+//    algo.run match {
+//      case Failure(ex) => {
+//        error(s"app error: ${ex.getMessage}")
+//
+//        procType match {
+//          case ProcessType.streaming() => {
+//            // streaming need to attempt more times by spark streaming itself
+//            throw ex
+//          }
+//          case _ => {
+//            shutdown
+//            sys.exit(-5)
+//          }
+//        }
+//      }
+//      case _ => {
+//        info("app finished and success")
+//      }
+//    }
   }
 
   private def readParamFile[T <: Param](file: String, fsType: String)(implicit m : Manifest[T]): Try[T] = {

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/AccuracyAlgo.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/AccuracyAlgo.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/AccuracyAlgo.scala
deleted file mode 100644
index 7e0a563..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/AccuracyAlgo.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo
-
-
-trait AccuracyAlgo extends Algo {
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/Algo.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/Algo.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/Algo.scala
deleted file mode 100644
index 82b71f1..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/Algo.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo
-
-import org.apache.griffin.measure.config.params.env._
-import org.apache.griffin.measure.config.params.user._
-import org.apache.griffin.measure.log.Loggable
-
-import scala.util.Try
-
-trait Algo extends Loggable with Serializable {
-
-  val envParam: EnvParam
-  val userParam: UserParam
-
-  def run(): Try[_]
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/MeasureType.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/MeasureType.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/MeasureType.scala
deleted file mode 100644
index 23d4dac..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/MeasureType.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo
-
-object MeasureType {
-
-  val accuracy = """^(?i)accuracy$""".r
-  val profile = """^(?i)profile$""".r
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/ProcessType.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/ProcessType.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/ProcessType.scala
deleted file mode 100644
index 5a85c7c..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/ProcessType.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo
-
-object ProcessType {
-
-  val batch = """^(?i)batch$""".r
-  val streaming = """^(?i)streaming$""".r
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/ProfileAlgo.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/ProfileAlgo.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/ProfileAlgo.scala
deleted file mode 100644
index 6ffc87a..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/ProfileAlgo.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo
-
-trait ProfileAlgo extends Algo {
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/batch/BatchAccuracyAlgo.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/batch/BatchAccuracyAlgo.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/batch/BatchAccuracyAlgo.scala
deleted file mode 100644
index 241f456..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/batch/BatchAccuracyAlgo.scala
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.batch
-
-import java.util.Date
-
-import org.apache.griffin.measure.algo.AccuracyAlgo
-import org.apache.griffin.measure.algo.core.AccuracyCore
-import org.apache.griffin.measure.config.params.AllParam
-import org.apache.griffin.measure.connector._
-import org.apache.griffin.measure.connector.direct.DirectDataConnector
-import org.apache.griffin.measure.persist._
-import org.apache.griffin.measure.result._
-import org.apache.griffin.measure.rule._
-import org.apache.griffin.measure.rule.expr._
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.hive.HiveContext
-import org.apache.spark.{SparkConf, SparkContext}
-
-import scala.util.{Failure, Success, Try}
-
-// accuracy algorithm for batch mode
-case class BatchAccuracyAlgo(allParam: AllParam) extends AccuracyAlgo {
-  val envParam = allParam.envParam
-  val userParam = allParam.userParam
-
-  def run(): Try[_] = {
-    Try {
-      val metricName = userParam.name
-
-      val sparkParam = envParam.sparkParam
-
-      val conf = new SparkConf().setAppName(metricName)
-      conf.setAll(sparkParam.config)
-      val sc = new SparkContext(conf)
-      sc.setLogLevel(sparkParam.logLevel)
-      val sqlContext = new HiveContext(sc)
-
-      // start time
-      val startTime = new Date().getTime()
-
-      // get persists to persist measure result
-      val persist: Persist = PersistFactory(envParam.persistParams, metricName).getPersists(startTime)
-
-      // get spark application id
-      val applicationId = sc.applicationId
-
-      // persist start id
-      persist.start(applicationId)
-
-      // generate rule from rule param, generate rule analyzer
-      val ruleFactory = RuleFactory(userParam.evaluateRuleParam)
-      val rule: StatementExpr = ruleFactory.generateRule()
-      val ruleAnalyzer: RuleAnalyzer = RuleAnalyzer(rule)
-
-      // const expr value map
-      val constExprValueMap = ExprValueUtil.genExprValueMaps(None, ruleAnalyzer.constCacheExprs, Map[String, Any]())
-      val finalConstExprValueMap = ExprValueUtil.updateExprValueMaps(ruleAnalyzer.constFinalCacheExprs, constExprValueMap)
-      val finalConstMap = finalConstExprValueMap.headOption match {
-        case Some(m) => m
-        case _ => Map[String, Any]()
-      }
-
-      // data connector
-      val sourceDataConnector: DirectDataConnector =
-        DataConnectorFactory.getDirectDataConnector(sqlContext, null, userParam.sourceParam,
-          ruleAnalyzer.sourceRuleExprs, finalConstMap
-        ) match {
-          case Success(cntr) => {
-            if (cntr.available) cntr
-            else throw new Exception("source data connection error!")
-          }
-          case Failure(ex) => throw ex
-        }
-      val targetDataConnector: DirectDataConnector =
-        DataConnectorFactory.getDirectDataConnector(sqlContext, null, userParam.targetParam,
-          ruleAnalyzer.targetRuleExprs, finalConstMap
-        ) match {
-          case Success(cntr) => {
-            if (cntr.available) cntr
-            else throw new Exception("target data connection error!")
-          }
-          case Failure(ex) => throw ex
-        }
-
-      // get metadata
-//      val sourceMetaData: Iterable[(String, String)] = sourceDataConnector.metaData() match {
-//        case Success(md) => md
-//        case _ => throw new Exception("source metadata error!")
-//      }
-//      val targetMetaData: Iterable[(String, String)] = targetDataConnector.metaData() match {
-//        case Success(md) => md
-//        case _ => throw new Exception("target metadata error!")
-//      }
-
-      // get data
-      val sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))] = sourceDataConnector.data() match {
-        case Success(dt) => dt
-        case Failure(ex) => throw ex
-      }
-      val targetData: RDD[(Product, (Map[String, Any], Map[String, Any]))] = targetDataConnector.data() match {
-        case Success(dt) => dt
-        case Failure(ex) => throw ex
-      }
-
-      // accuracy algorithm
-      val (accuResult, missingRdd, matchedRdd) = accuracy(sourceData, targetData, ruleAnalyzer)
-
-      // end time
-      val endTime = new Date().getTime
-      persist.log(endTime, s"calculation using time: ${endTime - startTime} ms")
-
-      // persist result
-      persist.result(endTime, accuResult)
-      val missingRecords = missingRdd.map(record2String(_, ruleAnalyzer.sourceRuleExprs.persistExprs, ruleAnalyzer.targetRuleExprs.persistExprs))
-//      persist.missRecords(missingRecords)
-      persist.records(missingRecords, PersistType.MISS)
-
-      // persist end time
-      val persistEndTime = new Date().getTime
-      persist.log(persistEndTime, s"persist using time: ${persistEndTime - endTime} ms")
-
-      // finish
-      persist.finish()
-
-      // context stop
-      sc.stop
-
-    }
-  }
-
-  def wrapInitData(data: Map[String, Any]): (Map[String, Any], Map[String, Any]) = {
-    (data, Map[String, Any]())
-  }
-
-  // calculate accuracy between source data and target data
-  def accuracy(sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))],
-               targetData: RDD[(Product, (Map[String, Any], Map[String, Any]))],
-               ruleAnalyzer: RuleAnalyzer) = {
-    // 1. cogroup
-    val allKvs = sourceData.cogroup(targetData)
-
-    // 2. accuracy calculation
-    val (accuResult, missingRdd, matchedRdd) = AccuracyCore.accuracy(allKvs, ruleAnalyzer)
-
-    (accuResult, missingRdd, matchedRdd)
-  }
-
-  // convert data into a string
-  def record2String(rec: (Product, (Map[String, Any], Map[String, Any])), sourcePersist: Iterable[Expr], targetPersist: Iterable[Expr]): String = {
-    val (key, (data, info)) = rec
-    val persistData = getPersistMap(data, sourcePersist)
-    val persistInfo = info.mapValues { value =>
-      value match {
-        case vd: Map[String, Any] => getPersistMap(vd, targetPersist)
-        case v => v
-      }
-    }.map(identity)
-    s"${persistData} [${persistInfo}]"
-  }
-
-  // get the expr value map of the persist expressions
-  private def getPersistMap(data: Map[String, Any], persist: Iterable[Expr]): Map[String, Any] = {
-    val persistMap = persist.map(e => (e._id, e.desc)).toMap
-    data.flatMap { pair =>
-      val (k, v) = pair
-      persistMap.get(k) match {
-        case Some(d) => Some((d -> v))
-        case _ => None
-      }
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/batch/BatchProfileAlgo.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/batch/BatchProfileAlgo.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/batch/BatchProfileAlgo.scala
deleted file mode 100644
index 163a0b7..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/batch/BatchProfileAlgo.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.batch
-
-import java.util.Date
-
-import org.apache.griffin.measure.algo.ProfileAlgo
-import org.apache.griffin.measure.algo.core.ProfileCore
-import org.apache.griffin.measure.config.params._
-import org.apache.griffin.measure.connector._
-import org.apache.griffin.measure.connector.direct.DirectDataConnector
-import org.apache.griffin.measure.persist.{Persist, PersistFactory, PersistType}
-import org.apache.griffin.measure.result._
-import org.apache.griffin.measure.rule.expr._
-import org.apache.griffin.measure.rule.{ExprValueUtil, RuleAnalyzer, RuleFactory}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.hive.HiveContext
-import org.apache.spark.{SparkConf, SparkContext}
-
-import scala.util.{Failure, Success, Try}
-
-// profile algorithm for batch mode
-case class BatchProfileAlgo(allParam: AllParam) extends ProfileAlgo {
-  val envParam = allParam.envParam
-  val userParam = allParam.userParam
-
-  def run(): Try[_] = {
-    Try {
-      val metricName = userParam.name
-
-      val sparkParam = envParam.sparkParam
-
-      val conf = new SparkConf().setAppName(metricName)
-      conf.setAll(sparkParam.config)
-      val sc = new SparkContext(conf)
-      sc.setLogLevel(sparkParam.logLevel)
-      val sqlContext = new HiveContext(sc)
-
-      // start time
-      val startTime = new Date().getTime()
-
-      // get persists to persist measure result
-      val persist: Persist = PersistFactory(envParam.persistParams, metricName).getPersists(startTime)
-
-      // get spark application id
-      val applicationId = sc.applicationId
-
-      // persist start id
-      persist.start(applicationId)
-
-      // generate rule from rule param, generate rule analyzer
-      val ruleFactory = RuleFactory(userParam.evaluateRuleParam)
-      val rule: StatementExpr = ruleFactory.generateRule()
-      val ruleAnalyzer: RuleAnalyzer = RuleAnalyzer(rule)
-
-      // const expr value map
-      val constExprValueMap = ExprValueUtil.genExprValueMaps(None, ruleAnalyzer.constCacheExprs, Map[String, Any]())
-      val finalConstExprValueMap = ExprValueUtil.updateExprValueMaps(ruleAnalyzer.constFinalCacheExprs, constExprValueMap)
-      val finalConstMap = finalConstExprValueMap.headOption match {
-        case Some(m) => m
-        case _ => Map[String, Any]()
-      }
-
-      // data connector
-      val sourceDataConnector: DirectDataConnector =
-      DataConnectorFactory.getDirectDataConnector(sqlContext, null, userParam.sourceParam,
-        ruleAnalyzer.sourceRuleExprs, finalConstMap
-      ) match {
-        case Success(cntr) => {
-          if (cntr.available) cntr
-          else throw new Exception("source data connection error!")
-        }
-        case Failure(ex) => throw ex
-      }
-
-      // get metadata
-      //      val sourceMetaData: Iterable[(String, String)] = sourceDataConnector.metaData() match {
-      //        case Success(md) => md
-      //        case _ => throw new Exception("source metadata error!")
-      //      }
-
-      // get data
-      val sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))] = sourceDataConnector.data() match {
-        case Success(dt) => dt
-        case Failure(ex) => throw ex
-      }
-
-      // profile algorithm
-      val (profileResult, missingRdd, matchedRdd) = profile(sourceData, ruleAnalyzer)
-
-      // end time
-      val endTime = new Date().getTime
-      persist.log(endTime, s"calculation using time: ${endTime - startTime} ms")
-
-      // persist result
-      persist.result(endTime, profileResult)
-      val matchedRecords = matchedRdd.map(record2String(_, ruleAnalyzer.sourceRuleExprs.persistExprs))
-//      persist.matchRecords(matchedRecords)
-      persist.records(matchedRecords, PersistType.MATCH)
-
-      // persist end time
-      val persistEndTime = new Date().getTime
-      persist.log(persistEndTime, s"persist using time: ${persistEndTime - endTime} ms")
-
-      // finish
-      persist.finish()
-
-      // context stop
-      sc.stop
-    }
-  }
-
-  def wrapInitData(data: Map[String, Any]): (Map[String, Any], Map[String, Any]) = {
-    (data, Map[String, Any]())
-  }
-
-  // calculate profile from source data
-  def profile(sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))], ruleAnalyzer: RuleAnalyzer
-              ) = {
-    // 1. profile calculation
-    val (profileResult, missingRdd, matchedRdd) = ProfileCore.profile(sourceData, ruleAnalyzer)
-
-    (profileResult, missingRdd, matchedRdd)
-  }
-
-  // convert data into a string
-  def record2String(rec: (Product, (Map[String, Any], Map[String, Any])), sourcePersist: Iterable[Expr]): String = {
-    val (key, (data, info)) = rec
-    val persistData = getPersistMap(data, sourcePersist)
-    val persistInfo = info
-    if (persistInfo.size > 0) s"${persistData} [${persistInfo}]" else s"${persistData}"
-  }
-
-  // get the expr value map of the persist expressions
-  private def getPersistMap(data: Map[String, Any], persist: Iterable[Expr]): Map[String, Any] = {
-    val persistMap = persist.map(e => (e._id, e.desc)).toMap
-    data.flatMap { pair =>
-      val (k, v) = pair
-      persistMap.get(k) match {
-        case Some(d) => Some((d -> v))
-        case _ => None
-      }
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/core/AccuracyCore.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/core/AccuracyCore.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/core/AccuracyCore.scala
deleted file mode 100644
index 4ec6505..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/core/AccuracyCore.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.core
-
-import org.apache.griffin.measure.rule.RuleAnalyzer
-import org.apache.griffin.measure.result._
-import org.apache.spark.rdd.RDD
-
-
-object AccuracyCore {
-
-  type V = Map[String, Any]
-  type T = Map[String, Any]
-
-  // allKvs: rdd of (key, (List[(sourceData, sourceInfo)], List[(targetData, targetInfo)]))
-  // output: accuracy result, missing source data rdd, matched source data rdd
-  def accuracy(allKvs: RDD[(Product, (Iterable[(V, T)], Iterable[(V, T)]))], ruleAnalyzer: RuleAnalyzer
-              ): (AccuracyResult, RDD[(Product, (V, T))], RDD[(Product, (V, T))]) = {
-    val result: RDD[(Long, Long, List[(Product, (V, T))], List[(Product, (V, T))])] = allKvs.map { kv =>
-      val (key, (sourceDatas, targetDatas)) = kv
-
-      // result: (missCount, matchCount, missDataList, matchDataList)
-      val rslt = sourceDatas.foldLeft((0L, 0L, List[(Product, (V, T))](), List[(Product, (V, T))]())) { (sr, sourcePair) =>
-        val matchResult = if (targetDatas.isEmpty) {
-          (false, Map[String, Any](MismatchInfo.wrap("no target")))
-        } else {
-          targetDatas.foldLeft((false, Map[String, Any]())) { (tr, targetPair) =>
-            if (tr._1) tr
-            else matchData(sourcePair, targetPair, ruleAnalyzer)
-          }
-        }
-
-        if (matchResult._1) {
-          val matchItem = (key, sourcePair)
-          (sr._1, sr._2 + 1, sr._3, sr._4 :+ matchItem)
-        } else {
-          val missItem = (key, (sourcePair._1, sourcePair._2 ++ matchResult._2))
-          (sr._1 + 1, sr._2, sr._3 :+ missItem, sr._4)
-        }
-      }
-
-      rslt
-    }
-
-    val missRdd = result.flatMap(_._3)
-    val matchRdd = result.flatMap(_._4)
-
-    def seq(cnt: (Long, Long), rcd: (Long, Long, Any, Any)): (Long, Long) = {
-      (cnt._1 + rcd._1, cnt._2 + rcd._2)
-    }
-    def comb(c1: (Long, Long), c2: (Long, Long)): (Long, Long) = {
-      (c1._1 + c2._1, c1._2 + c2._2)
-    }
-    val countPair = result.aggregate((0L, 0L))(seq, comb)
-
-    (AccuracyResult(countPair._1, (countPair._1 + countPair._2)), missRdd, matchRdd)
-  }
-
-  // try to match source and target data, return true if matched, false if unmatched, also with some matching info
-  private def matchData(source: (V, T), target: (V, T), ruleAnalyzer: RuleAnalyzer): (Boolean, T) = {
-
-    // 1. merge source and target cached data
-    val mergedExprValueMap: Map[String, Any] = mergeExprValueMap(source, target)
-
-    // 2. check valid
-    if (ruleAnalyzer.rule.valid(mergedExprValueMap)) {
-      // 3. substitute the cached data into statement, get the statement value
-      val matched = ruleAnalyzer.rule.calculate(mergedExprValueMap) match {
-        case Some(b: Boolean) => b
-        case _ => false
-      }
-      // currently we can not get the mismatch reason, we need to add such information to figure out how it mismatches
-      if (matched) (matched, Map[String, Any]())
-      else (matched, Map[String, Any](MismatchInfo.wrap("not matched")))
-    } else {
-      (false, Map[String, Any](MismatchInfo.wrap("invalid to compare")))
-    }
-
-  }
-
-//  private def when
-
-  private def mergeExprValueMap(source: (V, T), target: (V, T)): Map[String, Any] = {
-    source._1 ++ target._1
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/core/ProfileCore.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/core/ProfileCore.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/core/ProfileCore.scala
deleted file mode 100644
index 2987f2f..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/core/ProfileCore.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.core
-
-import org.apache.griffin.measure.rule.RuleAnalyzer
-import org.apache.griffin.measure.result._
-import org.apache.spark.rdd.RDD
-
-
-object ProfileCore {
-
-  type V = Map[String, Any]
-  type T = Map[String, Any]
-
-  // dataRdd: rdd of (key, (sourceData, sourceInfo))
-  // output: accuracy result, missing source data rdd, matched source data rdd
-  def profile(dataRdd: RDD[(Product, (V, T))], ruleAnalyzer: RuleAnalyzer
-              ): (ProfileResult, RDD[(Product, (V, T))], RDD[(Product, (V, T))]) = {
-
-    val resultRdd: RDD[((Product, (V, T)), Boolean)] = dataRdd.map { kv =>
-      val (key, (data, info)) = kv
-      val (matched, missInfo) = matchData((data, info), ruleAnalyzer)
-      ((key, (data, info ++ missInfo)), matched)
-    }
-
-    val totalCount = resultRdd.count
-    val matchRdd = resultRdd.filter(_._2).map(_._1)
-    val matchCount = matchRdd.count
-    val missRdd = resultRdd.filter(!_._2).map(_._1)
-    val missCount = missRdd.count
-
-    (ProfileResult(matchCount, totalCount), missRdd, matchRdd)
-
-  }
-
-  // try to match data as rule, return true if matched, false if unmatched
-  private def matchData(dataPair: (V, T), ruleAnalyzer: RuleAnalyzer): (Boolean, T) = {
-
-    val data: Map[String, Any] = dataPair._1
-
-    // 1. check valid
-    if (ruleAnalyzer.rule.valid(data)) {
-      // 2. substitute the cached data into statement, get the statement value
-      val matched = ruleAnalyzer.rule.calculate(data) match {
-        case Some(b: Boolean) => b
-        case _ => false
-      }
-      // currently we can not get the mismatch reason, we need to add such information to figure out how it mismatches
-      if (matched) (matched, Map[String, Any]())
-      else (matched, Map[String, Any](MismatchInfo.wrap("not matched")))
-    } else {
-      (false, Map[String, Any](MismatchInfo.wrap("invalid to compare")))
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyAlgo.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyAlgo.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyAlgo.scala
deleted file mode 100644
index bdac64e..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyAlgo.scala
+++ /dev/null
@@ -1,358 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.streaming
-
-import java.util.Date
-import java.util.concurrent.{Executors, ThreadPoolExecutor, TimeUnit}
-
-import org.apache.griffin.measure.algo.AccuracyAlgo
-import org.apache.griffin.measure.algo.core.AccuracyCore
-import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
-import org.apache.griffin.measure.cache.result.CacheResultProcesser
-import org.apache.griffin.measure.config.params.AllParam
-import org.apache.griffin.measure.connector._
-import org.apache.griffin.measure.connector.direct.DirectDataConnector
-import org.apache.griffin.measure.persist.{Persist, PersistFactory, PersistType}
-import org.apache.griffin.measure.result.{AccuracyResult, MismatchInfo, TimeStampInfo}
-import org.apache.griffin.measure.rule.{ExprValueUtil, RuleAnalyzer, RuleFactory}
-import org.apache.griffin.measure.rule.expr._
-import org.apache.griffin.measure.utils.TimeUtil
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.hive.HiveContext
-import org.apache.spark.streaming.{Milliseconds, Seconds, StreamingContext}
-import org.apache.spark.{SparkConf, SparkContext}
-
-import scala.util.{Failure, Success, Try}
-
-
-case class StreamingAccuracyAlgo(allParam: AllParam) extends AccuracyAlgo {
-  val envParam = allParam.envParam
-  val userParam = allParam.userParam
-
-  def run(): Try[_] = {
-    Try {
-      val metricName = userParam.name
-
-      val sparkParam = envParam.sparkParam
-
-      val conf = new SparkConf().setAppName(metricName)
-      conf.setAll(sparkParam.config)
-      val sc = new SparkContext(conf)
-      sc.setLogLevel(sparkParam.logLevel)
-      val sqlContext = new HiveContext(sc)
-//      val sqlContext = new SQLContext(sc)
-
-      val batchInterval = TimeUtil.milliseconds(sparkParam.batchInterval) match {
-        case Some(interval) => Milliseconds(interval)
-        case _ => throw new Exception("invalid batch interval")
-      }
-      val ssc = new StreamingContext(sc, batchInterval)
-      ssc.checkpoint(sparkParam.cpDir)
-
-      // init info cache instance
-      InfoCacheInstance.initInstance(envParam.infoCacheParams, metricName)
-      InfoCacheInstance.init
-
-      // start time
-      val startTime = new Date().getTime()
-
-      val persistFactory = PersistFactory(envParam.persistParams, metricName)
-
-      // get persists to persist measure result
-      val appPersist: Persist = persistFactory.getPersists(startTime)
-
-      // get spark application id
-      val applicationId = sc.applicationId
-
-      // persist start id
-      appPersist.start(applicationId)
-
-      // generate rule from rule param, generate rule analyzer
-      val ruleFactory = RuleFactory(userParam.evaluateRuleParam)
-      val rule: StatementExpr = ruleFactory.generateRule()
-      val ruleAnalyzer: RuleAnalyzer = RuleAnalyzer(rule)
-
-      // const expr value map
-      val constExprValueMap = ExprValueUtil.genExprValueMaps(None, ruleAnalyzer.constCacheExprs, Map[String, Any]())
-      val finalConstExprValueMap = ExprValueUtil.updateExprValueMaps(ruleAnalyzer.constFinalCacheExprs, constExprValueMap)
-      val finalConstMap = finalConstExprValueMap.headOption match {
-        case Some(m) => m
-        case _ => Map[String, Any]()
-      }
-
-      // data connector
-      val sourceDataConnector: DirectDataConnector =
-      DataConnectorFactory.getDirectDataConnector(sqlContext, ssc, userParam.sourceParam,
-        ruleAnalyzer.sourceRuleExprs, finalConstMap
-      ) match {
-        case Success(cntr) => {
-          if (cntr.available) cntr
-          else throw new Exception("source data connection error!")
-        }
-        case Failure(ex) => throw ex
-      }
-      val targetDataConnector: DirectDataConnector =
-        DataConnectorFactory.getDirectDataConnector(sqlContext, ssc, userParam.targetParam,
-          ruleAnalyzer.targetRuleExprs, finalConstMap
-        ) match {
-          case Success(cntr) => {
-            if (cntr.available) cntr
-            else throw new Exception("target data connection error!")
-          }
-          case Failure(ex) => throw ex
-        }
-
-      val cacheResultProcesser = CacheResultProcesser()
-
-      // init data stream
-      sourceDataConnector.init()
-      targetDataConnector.init()
-
-      val streamingAccuracyProcess = StreamingAccuracyProcess(
-        sourceDataConnector, targetDataConnector,
-        ruleAnalyzer, cacheResultProcesser, persistFactory, appPersist)
-
-      // process thread
-//      case class Process() extends Runnable {
-//        val lock = InfoCacheInstance.genLock("process")
-//        def run(): Unit = {
-//          val updateTime = new Date().getTime
-//          val locked = lock.lock(5, TimeUnit.SECONDS)
-//          if (locked) {
-//            try {
-//              val st = new Date().getTime
-//
-//              TimeInfoCache.startTimeInfoCache
-//
-//              // get data
-//              val sourceData = sourceDataConnector.data match {
-//                case Success(dt) => dt
-//                case Failure(ex) => throw ex
-//              }
-//              val targetData = targetDataConnector.data match {
-//                case Success(dt) => dt
-//                case Failure(ex) => throw ex
-//              }
-//
-//              sourceData.cache
-//              targetData.cache
-//
-//              println(s"sourceData.count: ${sourceData.count}")
-//              println(s"targetData.count: ${targetData.count}")
-//
-//              // accuracy algorithm
-//              val (accuResult, missingRdd, matchedRdd) = accuracy(sourceData, targetData, ruleAnalyzer)
-//              println(s"accuResult: ${accuResult}")
-//
-//              val ct = new Date().getTime
-//              appPersist.log(ct, s"calculation using time: ${ct - st} ms")
-//
-//              sourceData.unpersist()
-//              targetData.unpersist()
-//
-//              // result of every group
-//              val matchedGroups = reorgByTimeGroup(matchedRdd)
-//              val matchedGroupCount = matchedGroups.count
-//              println(s"===== matchedGroupCount: ${matchedGroupCount} =====")
-//
-//              // get missing results
-//              val missingGroups = reorgByTimeGroup(missingRdd)
-//              val missingGroupCount = missingGroups.count
-//              println(s"===== missingGroupCount: ${missingGroupCount} =====")
-//
-//              val groups = matchedGroups.cogroup(missingGroups)
-//              val groupCount = groups.count
-//              println(s"===== groupCount: ${groupCount} =====")
-//
-//              val updateResults = groups.flatMap { group =>
-//                val (t, (matchData, missData)) = group
-//
-//                val matchSize = matchData.size
-//                val missSize = missData.size
-//                val res = AccuracyResult(missSize, matchSize + missSize)
-//
-//                val updatedCacheResultOpt = cacheResultProcesser.genUpdateCacheResult(t, updateTime, res)
-//
-//                updatedCacheResultOpt.flatMap { updatedCacheResult =>
-//                  Some((updatedCacheResult, (t, missData)))
-//                }
-//              }
-//
-//              updateResults.cache
-//
-//              val updateResultsPart =  updateResults.map(_._1)
-//              val updateDataPart =  updateResults.map(_._2)
-//
-//              val updateResultsArray = updateResultsPart.collect()
-//
-//              // update results cache (in driver)
-//              // collect action is traversable once action, it will make rdd updateResults empty
-//              updateResultsArray.foreach { updateResult =>
-//                println(s"update result: ${updateResult}")
-//                cacheResultProcesser.update(updateResult)
-//                // persist result
-//                val persist: Persist = persistFactory.getPersists(updateResult.timeGroup)
-//                persist.result(updateTime, updateResult.result)
-//              }
-//
-//              // record missing data and update old data (in executor)
-//              updateDataPart.foreach { grp =>
-//                val (t, datas) = grp
-//                val persist: Persist = persistFactory.getPersists(t)
-//                // persist missing data
-//                val missStrings = datas.map { row =>
-//                  val (_, (value, info)) = row
-//                  s"${value} [${info.getOrElse(MismatchInfo.key, "unknown")}]"
-//                }
-//                persist.records(missStrings, PersistType.MISS)
-//                // data connector update old data
-//                val dumpDatas = datas.map { r =>
-//                  val (_, (v, i)) = r
-//                  v ++ i
-//                }
-//
-//                println(t)
-//                dumpDatas.foreach(println)
-//
-//                sourceDataConnector.updateOldData(t, dumpDatas)
-//                targetDataConnector.updateOldData(t, dumpDatas)    // not correct
-//              }
-//
-//              updateResults.unpersist()
-//
-//              // dump missing rdd   (this part not need for future version, only for current df cache data version)
-//              val dumpRdd: RDD[Map[String, Any]] = missingRdd.map { r =>
-//                val (_, (v, i)) = r
-//                v ++ i
-//              }
-//              sourceDataConnector.updateAllOldData(dumpRdd)
-//              targetDataConnector.updateAllOldData(dumpRdd)    // not correct
-//
-//              TimeInfoCache.endTimeInfoCache
-//
-//              val et = new Date().getTime
-//              appPersist.log(et, s"persist using time: ${et - ct} ms")
-//
-//            } catch {
-//              case e: Throwable => error(s"process error: ${e.getMessage}")
-//            } finally {
-//              lock.unlock()
-//            }
-//          }
-//        }
-//      }
-
-      val processInterval = TimeUtil.milliseconds(sparkParam.processInterval) match {
-        case Some(interval) => interval
-        case _ => throw new Exception("invalid process interval")
-      }
-      val process = TimingProcess(processInterval, streamingAccuracyProcess)
-
-      // clean thread
-//    case class Clean() extends Runnable {
-//      val lock = InfoCacheInstance.genLock("clean")
-//      def run(): Unit = {
-//        val locked = lock.lock(5, TimeUnit.SECONDS)
-//        if (locked) {
-//          try {
-//            sourceDataConnector.cleanData
-//            targetDataConnector.cleanData
-//          } finally {
-//            lock.unlock()
-//          }
-//        }
-//      }
-//    }
-//    val cleanInterval = TimeUtil.milliseconds(cleanerParam.cleanInterval) match {
-//      case Some(interval) => interval
-//      case _ => throw new Exception("invalid batch interval")
-//    }
-//    val clean = TimingProcess(cleanInterval, Clean())
-
-      process.startup()
-//    clean.startup()
-
-      ssc.start()
-      ssc.awaitTermination()
-      ssc.stop(stopSparkContext=true, stopGracefully=true)
-
-      // context stop
-      sc.stop
-
-      InfoCacheInstance.close
-
-      appPersist.finish()
-
-      process.shutdown()
-//    clean.shutdown()
-    }
-  }
-
-  // calculate accuracy between source data and target data
-//  def accuracy(sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))],
-//               targetData: RDD[(Product, (Map[String, Any], Map[String, Any]))],
-//               ruleAnalyzer: RuleAnalyzer) = {
-//    // 1. cogroup
-//    val allKvs = sourceData.cogroup(targetData)
-//
-//    // 2. accuracy calculation
-//    val (accuResult, missingRdd, matchedRdd) = AccuracyCore.accuracy(allKvs, ruleAnalyzer)
-//
-//    (accuResult, missingRdd, matchedRdd)
-//  }
-
-//  // convert data into a string
-//  def record2String(rec: (Product, (Map[String, Any], Map[String, Any])), sourcePersist: Iterable[Expr], targetPersist: Iterable[Expr]): String = {
-//    val (key, (data, info)) = rec
-//    val persistData = getPersistMap(data, sourcePersist)
-//    val persistInfo = info.mapValues { value =>
-//      value match {
-//        case vd: Map[String, Any] => getPersistMap(vd, targetPersist)
-//        case v => v
-//      }
-//    }.map(identity)
-//    s"${persistData} [${persistInfo}]"
-//  }
-//
-//  // get the expr value map of the persist expressions
-//  private def getPersistMap(data: Map[String, Any], persist: Iterable[Expr]): Map[String, Any] = {
-//    val persistMap = persist.map(e => (e._id, e.desc)).toMap
-//    data.flatMap { pair =>
-//      val (k, v) = pair
-//      persistMap.get(k) match {
-//        case Some(d) => Some((d -> v))
-//        case _ => None
-//      }
-//    }
-//  }
-
-//  def reorgByTimeGroup(rdd: RDD[(Product, (Map[String, Any], Map[String, Any]))]
-//                      ): RDD[(Long, (Product, (Map[String, Any], Map[String, Any])))] = {
-//    rdd.flatMap { row =>
-//      val (key, (value, info)) = row
-//      val b: Option[(Long, (Product, (Map[String, Any], Map[String, Any])))] = info.get(TimeStampInfo.key) match {
-//        case Some(t: Long) => Some((t, row))
-//        case _ => None
-//      }
-//      b
-//    }
-//  }
-
-}
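The algo above fails fast when the configured intervals cannot be parsed. As a hedged illustration of the kind of parsing TimeUtil.milliseconds performs, a simplified stand-in (not Griffin's actual implementation) could look like this:

    object IntervalSketch {
      private val Pattern = """(\d+)\s*(ms|s|m|h)""".r

      // parse strings like "2000ms", "30s", "5m", "1h" into milliseconds
      def milliseconds(text: String): Option[Long] = text.trim match {
        case Pattern(n, "ms") => Some(n.toLong)
        case Pattern(n, "s")  => Some(n.toLong * 1000L)
        case Pattern(n, "m")  => Some(n.toLong * 60L * 1000L)
        case Pattern(n, "h")  => Some(n.toLong * 60L * 60L * 1000L)
        case _                => None
      }
    }
    // e.g. IntervalSketch.milliseconds("2m") == Some(120000L), milliseconds("bad") == None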

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyProcess.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyProcess.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyProcess.scala
deleted file mode 100644
index be1f846..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/StreamingAccuracyProcess.scala
+++ /dev/null
@@ -1,234 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.streaming
-
-import java.util.Date
-import java.util.concurrent.TimeUnit
-
-import org.apache.griffin.measure.algo.core.AccuracyCore
-import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
-import org.apache.griffin.measure.cache.result.CacheResultProcesser
-import org.apache.griffin.measure.connector.direct.DirectDataConnector
-import org.apache.griffin.measure.log.Loggable
-import org.apache.griffin.measure.persist._
-import org.apache.griffin.measure.result.{AccuracyResult, MismatchInfo, TimeStampInfo}
-import org.apache.griffin.measure.rule._
-import org.apache.griffin.measure.rule.expr._
-import org.apache.spark.rdd.RDD
-
-import scala.util.{Failure, Success}
-
-case class StreamingAccuracyProcess(sourceDataConnector: DirectDataConnector,
-                                    targetDataConnector: DirectDataConnector,
-                                    ruleAnalyzer: RuleAnalyzer,
-                                    cacheResultProcesser: CacheResultProcesser,
-                                    persistFactory: PersistFactory,
-                                    appPersist: Persist
-                                   ) extends Runnable with Loggable {
-
-  val lock = InfoCacheInstance.genLock("process")
-
-  def run(): Unit = {
-//    println(s"cache count: ${cacheResultProcesser.cacheGroup.size}")
-    val updateTimeDate = new Date()
-    val updateTime = updateTimeDate.getTime
-    println(s"===== [${updateTimeDate}] process begins =====")
-    val locked = lock.lock(5, TimeUnit.SECONDS)
-    if (locked) {
-      try {
-        val st = new Date().getTime
-
-        TimeInfoCache.startTimeInfoCache
-
-        // get data
-        val sourceData = sourceDataConnector.data match {
-          case Success(dt) => dt
-          case Failure(ex) => throw ex
-        }
-        val targetData = targetDataConnector.data match {
-          case Success(dt) => dt
-          case Failure(ex) => throw ex
-        }
-
-        sourceData.cache
-        targetData.cache
-
-        println(s"sourceData.count: ${sourceData.count}")
-        println(s"targetData.count: ${targetData.count}")
-
-        // accuracy algorithm
-        val (accuResult, missingRdd, matchedRdd) = accuracy(sourceData, targetData, ruleAnalyzer)
-//        println(s"accuResult: ${accuResult}")
-
-        val ct = new Date().getTime
-        appPersist.log(ct, s"calculation using time: ${ct - st} ms")
-
-        sourceData.unpersist()
-        targetData.unpersist()
-
-        // result of every group
-        val matchedGroups = reorgByTimeGroup(matchedRdd)
-//        val matchedGroupCount = matchedGroups.count
-//        println(s"===== matchedGroupCount: ${matchedGroupCount} =====")
-
-        // get missing results
-        val missingGroups = reorgByTimeGroup(missingRdd)
-//        val missingGroupCount = missingGroups.count
-//        println(s"===== missingGroupCount: ${missingGroupCount} =====")
-
-        val groups = matchedGroups.cogroup(missingGroups)
-//        val groupCount = groups.count
-//        println(s"===== groupCount: ${groupCount} =====")
-
-        val updateResults = groups.flatMap { group =>
-          val (t, (matchData, missData)) = group
-
-          val matchSize = matchData.size
-          val missSize = missData.size
-          val res = AccuracyResult(missSize, matchSize + missSize)
-
-          val updatedCacheResultOpt = cacheResultProcesser.genUpdateCacheResult(t, updateTime, res)
-
-          updatedCacheResultOpt.flatMap { updatedCacheResult =>
-            Some((updatedCacheResult, (t, missData)))
-          }
-        }
-
-        updateResults.cache
-
-        val updateResultsPart =  updateResults.map(_._1)
-        val updateDataPart =  updateResults.map(_._2)
-
-        val updateResultsArray = updateResultsPart.collect()
-
-        // update results cache (in driver)
-        // collect is a traversable-once action; without the cache above it would leave the updateResults rdd empty
-        updateResultsArray.foreach { updateResult =>
-//          println(s"update result: ${updateResult}")
-          cacheResultProcesser.update(updateResult)
-          // persist result
-          val persist: Persist = persistFactory.getPersists(updateResult.timeGroup)
-          persist.result(updateTime, updateResult.result)
-        }
-
-        // record missing data and dump old data (in executor)
-        updateDataPart.foreach { grp =>
-          val (t, datas) = grp
-          val persist: Persist = persistFactory.getPersists(t)
-          // persist missing data
-          val missStrings = datas.map { row =>
-            record2String(row, ruleAnalyzer.sourceRuleExprs.persistExprs, ruleAnalyzer.targetRuleExprs.persistExprs)
-          }
-          persist.records(missStrings, PersistType.MISS)
-
-          // data connector update old data
-          val dumpDatas = datas.map { r =>
-            val (_, (v, i)) = r
-            v ++ i
-          }
-          sourceDataConnector.updateOldData(t, dumpDatas)
-//          targetDataConnector.updateOldData(t, dumpDatas)    // not correct
-        }
-
-        updateResults.unpersist()
-
-        TimeInfoCache.endTimeInfoCache
-
-        // clean old data
-        cleanData()
-
-        val et = new Date().getTime
-        appPersist.log(et, s"persist using time: ${et - ct} ms")
-
-      } catch {
-        case e: Throwable => error(s"process error: ${e.getMessage}")
-      } finally {
-        lock.unlock()
-      }
-    } else {
-      println(s"===== [${updateTimeDate}] process ignores =====")
-    }
-    val endTime = new Date().getTime
-    println(s"===== [${updateTimeDate}] process ends, using ${endTime - updateTime} ms =====")
-  }
-
-  // clean old data and old result cache
-  def cleanData(): Unit = {
-    try {
-      sourceDataConnector.cleanOldData
-      targetDataConnector.cleanOldData
-
-      val cleanTime = TimeInfoCache.getCleanTime
-      cacheResultProcesser.refresh(cleanTime)
-    } catch {
-      case e: Throwable => error(s"clean data error: ${e.getMessage}")
-    }
-  }
-
-  // calculate accuracy between source data and target data
-  private def accuracy(sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))],
-               targetData: RDD[(Product, (Map[String, Any], Map[String, Any]))],
-               ruleAnalyzer: RuleAnalyzer) = {
-    // 1. cogroup
-    val allKvs = sourceData.cogroup(targetData)
-
-    // 2. accuracy calculation
-    val (accuResult, missingRdd, matchedRdd) = AccuracyCore.accuracy(allKvs, ruleAnalyzer)
-
-    (accuResult, missingRdd, matchedRdd)
-  }
-
-  private def reorgByTimeGroup(rdd: RDD[(Product, (Map[String, Any], Map[String, Any]))]
-                      ): RDD[(Long, (Product, (Map[String, Any], Map[String, Any])))] = {
-    rdd.flatMap { row =>
-      val (key, (value, info)) = row
-      val b: Option[(Long, (Product, (Map[String, Any], Map[String, Any])))] = info.get(TimeStampInfo.key) match {
-        case Some(t: Long) => Some((t, row))
-        case _ => None
-      }
-      b
-    }
-  }
-
-  // convert data into a string
-  def record2String(rec: (Product, (Map[String, Any], Map[String, Any])), dataPersist: Iterable[Expr], infoPersist: Iterable[Expr]): String = {
-    val (key, (data, info)) = rec
-    val persistData = getPersistMap(data, dataPersist)
-    val persistInfo = info.mapValues { value =>
-      value match {
-        case vd: Map[String, Any] => getPersistMap(vd, infoPersist)
-        case v => v
-      }
-    }.map(identity)
-    s"${persistData} [${persistInfo}]"
-  }
-
-  // get the expr value map of the persist expressions
-  private def getPersistMap(data: Map[String, Any], persist: Iterable[Expr]): Map[String, Any] = {
-    val persistMap = persist.map(e => (e._id, e.desc)).toMap
-    data.flatMap { pair =>
-      val (k, v) = pair
-      persistMap.get(k) match {
-        case Some(d) => Some((d -> v))
-        case _ => None
-      }
-    }
-  }
-
-}
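For orientation, a Spark-free sketch of the per-time-group aggregation done above: group matched and missing records by timestamp, then compute a miss/total pair per group. AccuracyLike stands in for Griffin's AccuracyResult; the shapes and data here are illustrative only.

    object GroupAccuracySketch {
      final case class AccuracyLike(miss: Long, total: Long)

      def byTimeGroup[A](matched: Seq[(Long, A)],
                         missing: Seq[(Long, A)]): Map[Long, AccuracyLike] = {
        val matchCounts = matched.groupBy(_._1).map { case (t, rs) => (t, rs.size.toLong) }
        val missCounts  = missing.groupBy(_._1).map { case (t, rs) => (t, rs.size.toLong) }
        (matchCounts.keySet ++ missCounts.keySet).map { t =>
          val miss = missCounts.getOrElse(t, 0L)
          (t, AccuracyLike(miss, matchCounts.getOrElse(t, 0L) + miss))
        }.toMap
      }
    }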

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/TimingProcess.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/TimingProcess.scala b/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/TimingProcess.scala
deleted file mode 100644
index e5bd7de..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/algo/streaming/TimingProcess.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.algo.streaming
-
-import java.util.concurrent.{Executors, ThreadPoolExecutor, TimeUnit}
-import java.util.{Timer, TimerTask}
-
-case class TimingProcess(interval: Long, runnable: Runnable) {
-
-  val pool: ThreadPoolExecutor = Executors.newFixedThreadPool(5).asInstanceOf[ThreadPoolExecutor]
-
-  val timer = new Timer("process", true)
-
-  val timerTask = new TimerTask() {
-    override def run(): Unit = {
-      pool.submit(runnable)
-    }
-  }
-
-  def startup(): Unit = {
-    timer.schedule(timerTask, interval, interval)
-  }
-
-  def shutdown(): Unit = {
-    timer.cancel()
-    pool.shutdown()
-    pool.awaitTermination(10, TimeUnit.SECONDS)
-  }
-
-}
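The removed TimingProcess pairs a daemon Timer with a thread pool so that a slow run never blocks the timer thread. A hedged, standalone usage sketch of the same pattern (not Griffin code) follows:

    import java.util.{Timer, TimerTask}
    import java.util.concurrent.{Executors, TimeUnit}

    object TimingSketch {
      def main(args: Array[String]): Unit = {
        val pool = Executors.newFixedThreadPool(2)
        val timer = new Timer("process", true)
        val task = new TimerTask {
          // the timer thread only submits; the pool does the actual work
          override def run(): Unit = pool.submit(new Runnable {
            def run(): Unit = println(s"tick at ${System.currentTimeMillis}")
          })
        }
        timer.schedule(task, 1000L, 1000L)   // first run after 1s, then every 1s
        Thread.sleep(3500L)
        timer.cancel()
        pool.shutdown()
        pool.awaitTermination(5, TimeUnit.SECONDS)
      }
    }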

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/cache/info/TimeInfoCache.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/cache/info/TimeInfoCache.scala b/measure/src/main/scala/org/apache/griffin/measure/cache/info/TimeInfoCache.scala
index ac0acff..b581a58 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/cache/info/TimeInfoCache.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/cache/info/TimeInfoCache.scala
@@ -109,7 +109,7 @@ object TimeInfoCache extends Loggable with Serializable {
         case _ => -1
       }
     } catch {
-      case _ => -1
+      case e: Throwable => -1
     }
   }
 

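The one-line change above replaces a bare case _ catch with case e: Throwable: an untyped wildcard in a catch block matches every Throwable, including fatal errors, and recent Scala compilers warn about it. An equally idiomatic option, shown only for illustration, is scala.util.control.NonFatal:

    import scala.util.control.NonFatal

    def readLongOrDefault(s: String): Long =
      try s.trim.toLong
      catch { case NonFatal(_) => -1L }   // handles only non-fatal exceptions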
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/cache/result/CacheResultProcesser.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/cache/result/CacheResultProcesser.scala b/measure/src/main/scala/org/apache/griffin/measure/cache/result/CacheResultProcesser.scala
index 50d3ada..9916e92 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/cache/result/CacheResultProcesser.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/cache/result/CacheResultProcesser.scala
@@ -23,7 +23,7 @@ import org.apache.griffin.measure.result._
 
 import scala.collection.mutable.{Map => MutableMap}
 
-case class CacheResultProcesser() extends Loggable {
+object CacheResultProcesser extends Loggable {
 
   val cacheGroup: MutableMap[Long, CacheResult] = MutableMap()
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataCacheParam.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataCacheParam.scala b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataCacheParam.scala
deleted file mode 100644
index 9c60755..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataCacheParam.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.config.params.user
-
-import com.fasterxml.jackson.annotation.{JsonInclude, JsonProperty}
-import com.fasterxml.jackson.annotation.JsonInclude.Include
-import org.apache.griffin.measure.config.params.Param
-
-@JsonInclude(Include.NON_NULL)
-case class DataCacheParam( @JsonProperty("type") cacheType: String,
-                           @JsonProperty("config") config: Map[String, Any],
-                           @JsonProperty("time.range") timeRange: List[String]
-                         ) extends Param {
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataConnectorParam.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataConnectorParam.scala b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataConnectorParam.scala
index dbc2e0b..a819997 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataConnectorParam.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataConnectorParam.scala
@@ -26,12 +26,8 @@ import org.apache.griffin.measure.config.params.Param
 case class DataConnectorParam( @JsonProperty("type") conType: String,
                                @JsonProperty("version") version: String,
                                @JsonProperty("config") config: Map[String, Any],
-                               @JsonProperty("cache") cache: DataCacheParam,
-                               @JsonProperty("match.once") matchOnce: Boolean
+                               @JsonProperty("pre.proc") preProc: List[Map[String, Any]]
                              ) extends Param {
 
-  def getMatchOnce(): Boolean = {
-    if (matchOnce == null) false else matchOnce
-  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataSourceParam.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataSourceParam.scala b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataSourceParam.scala
new file mode 100644
index 0000000..b638234
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/DataSourceParam.scala
@@ -0,0 +1,31 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.config.params.user
+
+import com.fasterxml.jackson.annotation.{JsonInclude, JsonProperty}
+import com.fasterxml.jackson.annotation.JsonInclude.Include
+import org.apache.griffin.measure.config.params.Param
+
+@JsonInclude(Include.NON_NULL)
+case class DataSourceParam( @JsonProperty("name") name: String,
+                            @JsonProperty("connectors") connectors: List[DataConnectorParam],
+                            @JsonProperty("cache") cache: Map[String, Any]
+                          ) extends Param {
+
+}
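As a hedged illustration of how the new params compose (assuming the classes above are on the classpath), a data source with one connector might be built as below; the connector type, table name and cache path are made-up example values:

    val src = DataSourceParam(
      name = "source",
      connectors = List(DataConnectorParam(
        conType = "hive",
        version = "1.2",
        config = Map("table.name" -> "demo_src"),
        preProc = List()
      )),
      cache = Map("file.path" -> "hdfs:///griffin/cache")
    )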



[08/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/persist/HttpPersist.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/persist/HttpPersist.scala b/measure/src/main/scala/org/apache/griffin/measure/persist/HttpPersist.scala
index 6d5bac3..225ee41 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/persist/HttpPersist.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/persist/HttpPersist.scala
@@ -21,8 +21,10 @@ package org.apache.griffin.measure.persist
 import org.apache.griffin.measure.result._
 import org.apache.griffin.measure.utils.{HttpUtil, JsonUtil}
 import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
 
 import scala.util.Try
+import org.apache.griffin.measure.utils.ParamUtil._
 
 // persist result by http way
 case class HttpPersist(config: Map[String, Any], metricName: String, timeStamp: Long) extends Persist {
@@ -30,8 +32,10 @@ case class HttpPersist(config: Map[String, Any], metricName: String, timeStamp:
   val Api = "api"
   val Method = "method"
 
-  val api = config.getOrElse(Api, "").toString
-  val method = config.getOrElse(Method, "post").toString
+  val api = config.getString(Api, "")
+  val method = config.getString(Method, "post")
+
+  val _Value = "value"
 
   def available(): Boolean = {
     api.nonEmpty
@@ -40,21 +44,21 @@ case class HttpPersist(config: Map[String, Any], metricName: String, timeStamp:
   def start(msg: String): Unit = {}
   def finish(): Unit = {}
 
-  def result(rt: Long, result: Result): Unit = {
-    result match {
-      case ar: AccuracyResult => {
-        val dataMap = Map[String, Any](("name" -> metricName), ("tmst" -> timeStamp), ("total" -> ar.getTotal), ("matched" -> ar.getMatch))
-        httpResult(dataMap)
-      }
-      case pr: ProfileResult => {
-        val dataMap = Map[String, Any](("name" -> metricName), ("tmst" -> timeStamp), ("total" -> pr.getTotal), ("matched" -> pr.getMatch))
-        httpResult(dataMap)
-      }
-      case _ => {
-        info(s"result: ${result}")
-      }
-    }
-  }
+//  def result(rt: Long, result: Result): Unit = {
+//    result match {
+//      case ar: AccuracyResult => {
+//        val dataMap = Map[String, Any](("name" -> metricName), ("tmst" -> timeStamp), ("total" -> ar.getTotal), ("matched" -> ar.getMatch))
+//        httpResult(dataMap)
+//      }
+//      case pr: ProfileResult => {
+//        val dataMap = Map[String, Any](("name" -> metricName), ("tmst" -> timeStamp), ("total" -> pr.getTotal), ("matched" -> pr.getMatch))
+//        httpResult(dataMap)
+//      }
+//      case _ => {
+//        info(s"result: ${result}")
+//      }
+//    }
+//  }
 
   private def httpResult(dataMap: Map[String, Any]) = {
     try {
@@ -77,12 +81,34 @@ case class HttpPersist(config: Map[String, Any], metricName: String, timeStamp:
 
   }
 
-  def records(recs: RDD[String], tp: String): Unit = {}
-  def records(recs: Iterable[String], tp: String): Unit = {}
+//  def records(recs: RDD[String], tp: String): Unit = {}
+//  def records(recs: Iterable[String], tp: String): Unit = {}
 
 //  def missRecords(records: RDD[String]): Unit = {}
 //  def matchRecords(records: RDD[String]): Unit = {}
 
   def log(rt: Long, msg: String): Unit = {}
 
+//  def persistRecords(df: DataFrame, name: String): Unit = {}
+  def persistRecords(records: Iterable[String], name: String): Unit = {}
+
+//  def persistMetrics(metrics: Seq[String], name: String): Unit = {
+//    val maps = metrics.flatMap { m =>
+//      try {
+//        Some(JsonUtil.toAnyMap(m) ++ Map[String, Any](("name" -> metricName), ("tmst" -> timeStamp)))
+//      } catch {
+//        case e: Throwable => None
+//      }
+//    }
+//    maps.foreach { map =>
+//      httpResult(map)
+//    }
+//  }
+
+  def persistMetrics(metrics: Map[String, Any]): Unit = {
+    val head = Map[String, Any](("name" -> metricName), ("tmst" -> timeStamp))
+    val result = head + (_Value -> metrics)
+    httpResult(result)
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/persist/LoggerPersist.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/persist/LoggerPersist.scala b/measure/src/main/scala/org/apache/griffin/measure/persist/LoggerPersist.scala
index 00d41ea..0cd6f6b 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/persist/LoggerPersist.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/persist/LoggerPersist.scala
@@ -21,98 +21,151 @@ package org.apache.griffin.measure.persist
 import java.util.Date
 
 import org.apache.griffin.measure.result._
-import org.apache.griffin.measure.utils.HdfsUtil
+import org.apache.griffin.measure.utils.{HdfsUtil, JsonUtil}
 import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
+import org.apache.griffin.measure.utils.ParamUtil._
 
 // persist result and data to hdfs
 case class LoggerPersist(config: Map[String, Any], metricName: String, timeStamp: Long) extends Persist {
 
   val MaxLogLines = "max.log.lines"
 
-  val maxLogLines = try { config.getOrElse(MaxLogLines, 100).toString.toInt } catch { case _ => 100 }
+  val maxLogLines = config.getInt(MaxLogLines, 100)
 
   def available(): Boolean = true
 
   def start(msg: String): Unit = {
-    println(s"[${timeStamp}] ${metricName} start")
+    println(s"[${timeStamp}] ${metricName} start: ${msg}")
   }
   def finish(): Unit = {
     println(s"[${timeStamp}] ${metricName} finish")
   }
 
-  def result(rt: Long, result: Result): Unit = {
-    try {
-      val resStr = result match {
-        case ar: AccuracyResult => {
-          s"match percentage: ${ar.matchPercentage}\ntotal count: ${ar.getTotal}\nmiss count: ${ar.getMiss}, match count: ${ar.getMatch}"
-        }
-        case pr: ProfileResult => {
-          s"match percentage: ${pr.matchPercentage}\ntotal count: ${pr.getTotal}\nmiss count: ${pr.getMiss}, match count: ${pr.getMatch}"
-        }
-        case _ => {
-          s"result: ${result}"
-        }
-      }
-      println(s"[${timeStamp}] ${metricName} result: \n${resStr}")
-    } catch {
-      case e: Throwable => error(e.getMessage)
-    }
+//  def result(rt: Long, result: Result): Unit = {
+//    try {
+//      val resStr = result match {
+//        case ar: AccuracyResult => {
+//          s"match percentage: ${ar.matchPercentage}\ntotal count: ${ar.getTotal}\nmiss count: ${ar.getMiss}, match count: ${ar.getMatch}"
+//        }
+//        case pr: ProfileResult => {
+//          s"match percentage: ${pr.matchPercentage}\ntotal count: ${pr.getTotal}\nmiss count: ${pr.getMiss}, match count: ${pr.getMatch}"
+//        }
+//        case _ => {
+//          s"result: ${result}"
+//        }
+//      }
+//      println(s"[${timeStamp}] ${metricName} result: \n${resStr}")
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//  }
+//
+//  // need to avoid string too long
+//  private def rddRecords(records: RDD[String]): Unit = {
+//    try {
+//      val recordCount = records.count.toInt
+//      val count = if (maxLogLines < 0) recordCount else scala.math.min(maxLogLines, recordCount)
+//      if (count > 0) {
+//        val recordsArray = records.take(count)
+////        recordsArray.foreach(println)
+//      }
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//  }
+
+//  private def iterableRecords(records: Iterable[String]): Unit = {
+//    try {
+//      val recordCount = records.size
+//      val count = if (maxLogLines < 0) recordCount else scala.math.min(maxLogLines, recordCount)
+//      if (count > 0) {
+//        val recordsArray = records.take(count)
+////        recordsArray.foreach(println)
+//      }
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//  }
+
+//  def records(recs: RDD[String], tp: String): Unit = {
+//    tp match {
+//      case PersistDataType.MISS => rddRecords(recs)
+//      case PersistDataType.MATCH => rddRecords(recs)
+//      case _ => {}
+//    }
+//  }
+//
+//  def records(recs: Iterable[String], tp: String): Unit = {
+//    tp match {
+//      case PersistDataType.MISS => iterableRecords(recs)
+//      case PersistDataType.MATCH => iterableRecords(recs)
+//      case _ => {}
+//    }
+//  }
+
+//  def missRecords(records: RDD[String]): Unit = {
+//    warn(s"[${timeStamp}] ${metricName} miss records: ")
+//    rddRecords(records)
+//  }
+//  def matchRecords(records: RDD[String]): Unit = {
+//    warn(s"[${timeStamp}] ${metricName} match records: ")
+//    rddRecords(records)
+//  }
+
+  def log(rt: Long, msg: String): Unit = {
+    println(s"[${timeStamp}] ${rt}: ${msg}")
   }
 
-  // need to avoid string too long
-  private def rddRecords(records: RDD[String]): Unit = {
-    try {
-      val recordCount = records.count.toInt
-      val count = if (maxLogLines < 0) recordCount else scala.math.min(maxLogLines, recordCount)
-      if (count > 0) {
-        val recordsArray = records.take(count)
+//  def persistRecords(df: DataFrame, name: String): Unit = {
+//    val records = df.toJSON
+//    println(s"${name} [${timeStamp}] records: ")
+//    try {
+//      val recordCount = records.count.toInt
+//      val count = if (maxLogLines < 0) recordCount else scala.math.min(maxLogLines, recordCount)
+//      if (count > 0) {
+//        val recordsArray = records.take(count)
 //        recordsArray.foreach(println)
-      }
-    } catch {
-      case e: Throwable => error(e.getMessage)
-    }
-  }
+//      }
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//  }
 
-  private def iterableRecords(records: Iterable[String]): Unit = {
+  def persistRecords(records: Iterable[String], name: String): Unit = {
     try {
       val recordCount = records.size
       val count = if (maxLogLines < 0) recordCount else scala.math.min(maxLogLines, recordCount)
       if (count > 0) {
-        val recordsArray = records.take(count)
-//        recordsArray.foreach(println)
+        records.foreach(println)
       }
     } catch {
       case e: Throwable => error(e.getMessage)
     }
   }
 
-  def records(recs: RDD[String], tp: String): Unit = {
-    tp match {
-      case PersistType.MISS => rddRecords(recs)
-      case PersistType.MATCH => rddRecords(recs)
-      case _ => {}
-    }
-  }
-
-  def records(recs: Iterable[String], tp: String): Unit = {
-    tp match {
-      case PersistType.MISS => iterableRecords(recs)
-      case PersistType.MATCH => iterableRecords(recs)
-      case _ => {}
-    }
-  }
-
-//  def missRecords(records: RDD[String]): Unit = {
-//    warn(s"[${timeStamp}] ${metricName} miss records: ")
-//    rddRecords(records)
-//  }
-//  def matchRecords(records: RDD[String]): Unit = {
-//    warn(s"[${timeStamp}] ${metricName} match records: ")
-//    rddRecords(records)
+//  def persistMetrics(metrics: Seq[String], name: String): Unit = {
+//    try {
+//      val recordCount = metrics.size
+//      val count = if (maxLogLines < 0) recordCount else scala.math.min(maxLogLines, recordCount)
+//      if (count > 0) {
+//        val recordsArray = metrics.take(count)
+//        recordsArray.foreach(println)
+//      }
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
 //  }
 
-  def log(rt: Long, msg: String): Unit = {
-    println(s"[${timeStamp}] ${rt}: ${msg}")
+  def persistMetrics(metrics: Map[String, Any]): Unit = {
+    println(s"${metricName} [${timeStamp}] metrics: ")
+    val json = JsonUtil.toJson(metrics)
+    println(json)
+//    metrics.foreach { metric =>
+//      val (key, value) = metric
+//      println(s"${key}: ${value}")
+//    }
   }
 
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/persist/MultiPersists.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/persist/MultiPersists.scala b/measure/src/main/scala/org/apache/griffin/measure/persist/MultiPersists.scala
index 25c8b0b..0b7c98c 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/persist/MultiPersists.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/persist/MultiPersists.scala
@@ -21,6 +21,7 @@ package org.apache.griffin.measure.persist
 import org.apache.griffin.measure.result._
 import org.apache.griffin.measure.utils.{HttpUtil, JsonUtil}
 import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
 
 import scala.util.Try
 
@@ -39,14 +40,19 @@ case class MultiPersists(persists: Iterable[Persist]) extends Persist {
   def start(msg: String): Unit = { persists.foreach(_.start(msg)) }
   def finish(): Unit = { persists.foreach(_.finish()) }
 
-  def result(rt: Long, result: Result): Unit = { persists.foreach(_.result(rt, result)) }
-
-  def records(recs: RDD[String], tp: String): Unit = { persists.foreach(_.records(recs, tp)) }
-  def records(recs: Iterable[String], tp: String): Unit = { persists.foreach(_.records(recs, tp)) }
+//  def result(rt: Long, result: Result): Unit = { persists.foreach(_.result(rt, result)) }
+//
+//  def records(recs: RDD[String], tp: String): Unit = { persists.foreach(_.records(recs, tp)) }
+//  def records(recs: Iterable[String], tp: String): Unit = { persists.foreach(_.records(recs, tp)) }
 
 //  def missRecords(records: RDD[String]): Unit = { persists.foreach(_.missRecords(records)) }
 //  def matchRecords(records: RDD[String]): Unit = { persists.foreach(_.matchRecords(records)) }
 
   def log(rt: Long, msg: String): Unit = { persists.foreach(_.log(rt, msg)) }
 
+//  def persistRecords(df: DataFrame, name: String): Unit = { persists.foreach(_.persistRecords(df, name)) }
+  def persistRecords(records: Iterable[String], name: String): Unit = { persists.foreach(_.persistRecords(records, name)) }
+//  def persistMetrics(metrics: Seq[String], name: String): Unit = { persists.foreach(_.persistMetrics(metrics, name)) }
+  def persistMetrics(metrics: Map[String, Any]): Unit = { persists.foreach(_.persistMetrics(metrics)) }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/persist/OldHttpPersist.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/persist/OldHttpPersist.scala b/measure/src/main/scala/org/apache/griffin/measure/persist/OldHttpPersist.scala
index 357d6e1..84316b3 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/persist/OldHttpPersist.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/persist/OldHttpPersist.scala
@@ -1,87 +1,87 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.persist
-
-import org.apache.griffin.measure.result._
-import org.apache.griffin.measure.utils.{HttpUtil, JsonUtil}
-import org.apache.spark.rdd.RDD
-
-// persist result by old http way -- temporary way
-case class OldHttpPersist(config: Map[String, Any], metricName: String, timeStamp: Long) extends Persist {
-
-  val Api = "api"
-  val Method = "method"
-
-  val api = config.getOrElse(Api, "").toString
-  val method = config.getOrElse(Method, "post").toString
-
-  def available(): Boolean = {
-    api.nonEmpty
-  }
-
-  def start(msg: String): Unit = {}
-  def finish(): Unit = {}
-
-  def result(rt: Long, result: Result): Unit = {
-    result match {
-      case ar: AccuracyResult => {
-        val matchPercentage: Double = if (ar.getTotal <= 0) 0 else (ar.getMatch * 1.0 / ar.getTotal) * 100
-        val dataMap = Map[String, Any](("metricName" -> metricName), ("timestamp" -> timeStamp), ("value" -> matchPercentage), ("count" -> ar.getTotal))
-        httpResult(dataMap)
-      }
-      case pr: ProfileResult => {
-        val dataMap = Map[String, Any](("metricName" -> metricName), ("timestamp" -> timeStamp), ("value" -> pr.getMatch), ("count" -> pr.getTotal))
-        httpResult(dataMap)
-      }
-      case _ => {
-        info(s"result: ${result}")
-      }
-    }
-  }
-
-  private def httpResult(dataMap: Map[String, Any]) = {
-    try {
-      val data = JsonUtil.toJson(dataMap)
-      // post
-      val params = Map[String, Object]()
-      val header = Map[String, Object](("content-type" -> "application/json"))
-
-      def func(): Boolean = {
-        HttpUtil.httpRequest(api, method, params, header, data)
-      }
-
-      PersistThreadPool.addTask(func _, 10)
-
-//      val status = HttpUtil.httpRequest(api, method, params, header, data)
-//      info(s"${method} to ${api} response status: ${status}")
-    } catch {
-      case e: Throwable => error(e.getMessage)
-    }
-
-  }
-
-  def records(recs: RDD[String], tp: String): Unit = {}
-  def records(recs: Iterable[String], tp: String): Unit = {}
-
-//  def missRecords(records: RDD[String]): Unit = {}
-//  def matchRecords(records: RDD[String]): Unit = {}
-
-  def log(rt: Long, msg: String): Unit = {}
-
-}
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.persist
+//
+//import org.apache.griffin.measure.result._
+//import org.apache.griffin.measure.utils.{HttpUtil, JsonUtil}
+//import org.apache.spark.rdd.RDD
+//
+//// persist result by old http way -- temporary way
+//case class OldHttpPersist(config: Map[String, Any], metricName: String, timeStamp: Long) extends Persist {
+//
+//  val Api = "api"
+//  val Method = "method"
+//
+//  val api = config.getOrElse(Api, "").toString
+//  val method = config.getOrElse(Method, "post").toString
+//
+//  def available(): Boolean = {
+//    api.nonEmpty
+//  }
+//
+//  def start(msg: String): Unit = {}
+//  def finish(): Unit = {}
+//
+//  def result(rt: Long, result: Result): Unit = {
+//    result match {
+//      case ar: AccuracyResult => {
+//        val matchPercentage: Double = if (ar.getTotal <= 0) 0 else (ar.getMatch * 1.0 / ar.getTotal) * 100
+//        val dataMap = Map[String, Any](("metricName" -> metricName), ("timestamp" -> timeStamp), ("value" -> matchPercentage), ("count" -> ar.getTotal))
+//        httpResult(dataMap)
+//      }
+//      case pr: ProfileResult => {
+//        val dataMap = Map[String, Any](("metricName" -> metricName), ("timestamp" -> timeStamp), ("value" -> pr.getMatch), ("count" -> pr.getTotal))
+//        httpResult(dataMap)
+//      }
+//      case _ => {
+//        info(s"result: ${result}")
+//      }
+//    }
+//  }
+//
+//  private def httpResult(dataMap: Map[String, Any]) = {
+//    try {
+//      val data = JsonUtil.toJson(dataMap)
+//      // post
+//      val params = Map[String, Object]()
+//      val header = Map[String, Object](("content-type" -> "application/json"))
+//
+//      def func(): Boolean = {
+//        HttpUtil.httpRequest(api, method, params, header, data)
+//      }
+//
+//      PersistThreadPool.addTask(func _, 10)
+//
+////      val status = HttpUtil.httpRequest(api, method, params, header, data)
+////      info(s"${method} to ${api} response status: ${status}")
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//
+//  }
+//
+//  def records(recs: RDD[String], tp: String): Unit = {}
+//  def records(recs: Iterable[String], tp: String): Unit = {}
+//
+////  def missRecords(records: RDD[String]): Unit = {}
+////  def matchRecords(records: RDD[String]): Unit = {}
+//
+//  def log(rt: Long, msg: String): Unit = {}
+//
+//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/persist/Persist.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/persist/Persist.scala b/measure/src/main/scala/org/apache/griffin/measure/persist/Persist.scala
index bc16599..2884fa6 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/persist/Persist.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/persist/Persist.scala
@@ -21,6 +21,7 @@ package org.apache.griffin.measure.persist
 import org.apache.griffin.measure.log.Loggable
 import org.apache.griffin.measure.result._
 import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
 
 import scala.util.Try
 
@@ -35,18 +36,21 @@ trait Persist extends Loggable with Serializable {
   def start(msg: String): Unit
   def finish(): Unit
 
-  def result(rt: Long, result: Result): Unit
+  def log(rt: Long, msg: String): Unit
 
-  def records(recs: RDD[String], tp: String): Unit
-  def records(recs: Iterable[String], tp: String): Unit
+//  def result(rt: Long, result: Result): Unit
+//
+//  def records(recs: RDD[String], tp: String): Unit
+//  def records(recs: Iterable[String], tp: String): Unit
 
-//  def missRecords(records: RDD[String]): Unit
-//  def matchRecords(records: RDD[String]): Unit
+//  def persistRecords(df: DataFrame, name: String): Unit
+  def persistRecords(records: Iterable[String], name: String): Unit
+//  def persistMetrics(metrics: Seq[String], name: String): Unit
+  def persistMetrics(metrics: Map[String, Any]): Unit
 
-  def log(rt: Long, msg: String): Unit
 }
 
-object PersistType {
-  final val MISS = "miss"
-  final val MATCH = "match"
-}
\ No newline at end of file
+//object PersistDataType {
+//  final val MISS = "miss"
+//  final val MATCH = "match"
+//}
\ No newline at end of file
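The Persist trait now exposes persistRecords over string records and persistMetrics over a metrics map. A minimal standalone sketch of that contract; PersistLike and ConsolePersist are illustrative stand-ins, not the Griffin trait or any of its implementations:

    trait PersistLike {
      def persistRecords(records: Iterable[String], name: String): Unit
      def persistMetrics(metrics: Map[String, Any]): Unit
    }

    // simplest possible sink: print everything to stdout
    class ConsolePersist extends PersistLike {
      def persistRecords(records: Iterable[String], name: String): Unit =
        records.foreach(r => println(s"[${name}] ${r}"))
      def persistMetrics(metrics: Map[String, Any]): Unit =
        metrics.foreach { case (k, v) => println(s"${k}: ${v}") }
    }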

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/persist/PersistFactory.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/persist/PersistFactory.scala b/measure/src/main/scala/org/apache/griffin/measure/persist/PersistFactory.scala
index 4330160..3a74343 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/persist/PersistFactory.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/persist/PersistFactory.scala
@@ -27,7 +27,7 @@ case class PersistFactory(persistParams: Iterable[PersistParam], metricName: Str
 
   val HDFS_REGEX = """^(?i)hdfs$""".r
   val HTTP_REGEX = """^(?i)http$""".r
-  val OLDHTTP_REGEX = """^(?i)oldhttp$""".r
+//  val OLDHTTP_REGEX = """^(?i)oldhttp$""".r
   val LOG_REGEX = """^(?i)log$""".r
 
   def getPersists(timeStamp: Long): MultiPersists = {
@@ -40,7 +40,7 @@ case class PersistFactory(persistParams: Iterable[PersistParam], metricName: Str
     val persistTry = persistParam.persistType match {
       case HDFS_REGEX() => Try(HdfsPersist(config, metricName, timeStamp))
       case HTTP_REGEX() => Try(HttpPersist(config, metricName, timeStamp))
-      case OLDHTTP_REGEX() => Try(OldHttpPersist(config, metricName, timeStamp))
+//      case OLDHTTP_REGEX() => Try(OldHttpPersist(config, metricName, timeStamp))
       case LOG_REGEX() => Try(LoggerPersist(config, metricName, timeStamp))
       case _ => throw new Exception("not supported persist type")
     }
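
A quick standalone check of the case-insensitive type matching used above (REPL-style, not part of the factory):

    val HDFS_REGEX = """^(?i)hdfs$""".r
    Seq("hdfs", "HDFS", "Hdfs").foreach { t =>
      // each variant matches, so all of them resolve to HdfsPersist in the factory;
      // "oldhttp" no longer matches any branch and hits the "not supported persist type" exception
      assert(HDFS_REGEX.findFirstIn(t).nonEmpty)
    }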

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/Algo.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/Algo.scala b/measure/src/main/scala/org/apache/griffin/measure/process/Algo.scala
new file mode 100644
index 0000000..7f1b153
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/Algo.scala
@@ -0,0 +1,34 @@
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.algo
+//
+//import org.apache.griffin.measure.config.params.env._
+//import org.apache.griffin.measure.config.params.user._
+//import org.apache.griffin.measure.log.Loggable
+//
+//import scala.util.Try
+//
+//trait Algo extends Loggable with Serializable {
+//
+//  val envParam: EnvParam
+//  val userParam: UserParam
+//
+//  def run(): Try[_]
+//
+//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/BatchDqProcess.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/BatchDqProcess.scala b/measure/src/main/scala/org/apache/griffin/measure/process/BatchDqProcess.scala
new file mode 100644
index 0000000..737a43f
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/BatchDqProcess.scala
@@ -0,0 +1,117 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import java.util.Date
+
+import org.apache.griffin.measure.config.params._
+import org.apache.griffin.measure.config.params.env._
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.data.source.DataSourceFactory
+import org.apache.griffin.measure.persist.{Persist, PersistFactory}
+import org.apache.griffin.measure.process.engine.{DqEngineFactory, SparkSqlEngine}
+import org.apache.griffin.measure.rule.adaptor.{RuleAdaptorGroup, RunPhase}
+import org.apache.griffin.measure.rule.udf.GriffinUdfs
+import org.apache.griffin.measure.utils.JsonUtil
+import org.apache.spark.sql.SQLContext
+import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.{SparkConf, SparkContext}
+
+import scala.util.Try
+
+case class BatchDqProcess(allParam: AllParam) extends DqProcess {
+
+  val envParam: EnvParam = allParam.envParam
+  val userParam: UserParam = allParam.userParam
+
+  val metricName = userParam.name
+  val sparkParam = envParam.sparkParam
+
+  var sparkContext: SparkContext = _
+  var sqlContext: SQLContext = _
+
+  def retriable: Boolean = false
+
+  def init: Try[_] = Try {
+    val conf = new SparkConf().setAppName(metricName)
+    conf.setAll(sparkParam.config)
+    sparkContext = new SparkContext(conf)
+    sparkContext.setLogLevel(sparkParam.logLevel)
+    sqlContext = new HiveContext(sparkContext)
+
+    // register udf
+    GriffinUdfs.register(sqlContext)
+
+    // init adaptors
+    val dataSourceNames = userParam.dataSources.map(_.name)
+    RuleAdaptorGroup.init(sqlContext, dataSourceNames)
+  }
+
+  def run: Try[_] = Try {
+    // start time
+    val startTime = new Date().getTime()
+
+    // get persists to persist measure result
+    val persistFactory = PersistFactory(envParam.persistParams, metricName)
+    val persist: Persist = persistFactory.getPersists(startTime)
+
+    // persist start id
+    val applicationId = sparkContext.applicationId
+    persist.start(applicationId)
+
+    // get dq engines
+    val dqEngines = DqEngineFactory.genDqEngines(sqlContext)
+
+    // generate data sources
+    val dataSources = DataSourceFactory.genDataSources(sqlContext, null, dqEngines, userParam.dataSources, metricName)
+    dataSources.foreach(_.init)
+
+    // init data sources
+    dqEngines.loadData(dataSources, startTime)
+
+    // generate rule steps
+    val ruleSteps = RuleAdaptorGroup.genConcreteRuleSteps(userParam.evaluateRuleParam, RunPhase)
+
+    // run rules
+    dqEngines.runRuleSteps(ruleSteps)
+
+    // persist results
+    val timeGroups = dqEngines.persistAllMetrics(ruleSteps, persistFactory)
+
+    val rdds = dqEngines.collectUpdateRDDs(ruleSteps, timeGroups)
+    rdds.foreach(_._2.cache())
+
+    dqEngines.persistAllRecords(rdds, persistFactory)
+//    dqEngines.persistAllRecords(ruleSteps, persistFactory, timeGroups)
+
+    rdds.foreach(_._2.unpersist())
+
+    // end time
+    val endTime = new Date().getTime
+    persist.log(endTime, s"process using time: ${endTime - startTime} ms")
+
+    // finish
+    persist.finish()
+  }
+
+  def end: Try[_] = Try {
+    sparkContext.stop
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/DqProcess.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/DqProcess.scala b/measure/src/main/scala/org/apache/griffin/measure/process/DqProcess.scala
new file mode 100644
index 0000000..50b04a8
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/DqProcess.scala
@@ -0,0 +1,40 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import org.apache.griffin.measure.config.params.env._
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.log.Loggable
+
+import scala.util.Try
+
+trait DqProcess extends Loggable with Serializable {
+
+  val envParam: EnvParam
+  val userParam: UserParam
+
+  def init: Try[_]
+
+  def run: Try[_]
+
+  def end: Try[_]
+
+  def retriable: Boolean
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/ProcessType.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/ProcessType.scala b/measure/src/main/scala/org/apache/griffin/measure/process/ProcessType.scala
new file mode 100644
index 0000000..36f88e1
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/ProcessType.scala
@@ -0,0 +1,47 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import scala.util.matching.Regex
+
+sealed trait ProcessType {
+  val regex: Regex
+  val desc: String
+}
+
+object ProcessType {
+  private val procTypes: List[ProcessType] = List(BatchProcessType, StreamingProcessType)
+  def apply(ptn: String): ProcessType = {
+    procTypes.filter(tp => ptn match {
+      case tp.regex() => true
+      case _ => false
+    }).headOption.getOrElse(BatchProcessType)
+  }
+  def unapply(pt: ProcessType): Option[String] = Some(pt.desc)
+}
+
+final case object BatchProcessType extends ProcessType {
+  val regex = """^(?i)batch$""".r
+  val desc = "batch"
+}
+
+final case object StreamingProcessType extends ProcessType {
+  val regex = """^(?i)streaming$""".r
+  val desc = "streaming"
+}
\ No newline at end of file
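
As a quick check of the matcher just added, a minimal sketch using only the ProcessType object from this file:

    import org.apache.griffin.measure.process._

    object ProcessTypeExample extends App {
      assert(ProcessType("BATCH") == BatchProcessType)          // case-insensitive match
      assert(ProcessType("streaming") == StreamingProcessType)
      assert(ProcessType("unknown") == BatchProcessType)        // unmatched strings fall back to batch
    }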

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/StreamingDqProcess.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/StreamingDqProcess.scala b/measure/src/main/scala/org/apache/griffin/measure/process/StreamingDqProcess.scala
new file mode 100644
index 0000000..a567941
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/StreamingDqProcess.scala
@@ -0,0 +1,157 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import java.util.Date
+
+import org.apache.griffin.measure.cache.info.InfoCacheInstance
+import org.apache.griffin.measure.config.params._
+import org.apache.griffin.measure.config.params.env._
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.data.source.DataSourceFactory
+import org.apache.griffin.measure.persist.{Persist, PersistFactory}
+import org.apache.griffin.measure.process.engine.DqEngineFactory
+import org.apache.griffin.measure.rule.adaptor.RuleAdaptorGroup
+import org.apache.griffin.measure.rule.udf.GriffinUdfs
+import org.apache.griffin.measure.utils.TimeUtil
+import org.apache.spark.sql.SQLContext
+import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.streaming.{Milliseconds, StreamingContext}
+import org.apache.spark.{SparkConf, SparkContext}
+
+import scala.util.Try
+
+case class StreamingDqProcess(allParam: AllParam) extends DqProcess {
+
+  val envParam: EnvParam = allParam.envParam
+  val userParam: UserParam = allParam.userParam
+
+  val metricName = userParam.name
+  val sparkParam = envParam.sparkParam
+
+  var sparkContext: SparkContext = _
+  var sqlContext: SQLContext = _
+
+  def retriable: Boolean = true
+
+  def init: Try[_] = Try {
+    val conf = new SparkConf().setAppName(metricName)
+    conf.setAll(sparkParam.config)
+    sparkContext = new SparkContext(conf)
+    sparkContext.setLogLevel(sparkParam.logLevel)
+    sqlContext = new HiveContext(sparkContext)
+
+    // init info cache instance
+    InfoCacheInstance.initInstance(envParam.infoCacheParams, metricName)
+    InfoCacheInstance.init
+
+    // register udf
+    GriffinUdfs.register(sqlContext)
+
+    // init adaptors
+    val dataSourceNames = userParam.dataSources.map(_.name)
+    RuleAdaptorGroup.init(sqlContext, dataSourceNames)
+  }
+
+  def run: Try[_] = Try {
+    val ssc = StreamingContext.getOrCreate(sparkParam.cpDir, () => {
+      try {
+        createStreamingContext
+      } catch {
+        case e: Throwable => {
+          error(s"create streaming context error: ${e.getMessage}")
+          throw e
+        }
+      }
+    })
+
+    // start time
+    val startTime = new Date().getTime()
+
+    // get persists to persist measure result
+    val persistFactory = PersistFactory(envParam.persistParams, metricName)
+    val persist: Persist = persistFactory.getPersists(startTime)
+
+    // persist start id
+    val applicationId = sparkContext.applicationId
+    persist.start(applicationId)
+
+    // get dq engines
+    val dqEngines = DqEngineFactory.genDqEngines(sqlContext)
+
+    // generate data sources
+    val dataSources = DataSourceFactory.genDataSources(sqlContext, ssc, dqEngines, userParam.dataSources, metricName)
+    dataSources.foreach(_.init)
+
+    // process thread
+    val dqThread = StreamingDqThread(dqEngines, dataSources, userParam.evaluateRuleParam, persistFactory, persist)
+
+    // init data sources
+//    dqEngines.loadData(dataSources)
+//
+//    // generate rule steps
+//    val ruleSteps = RuleAdaptorGroup.genConcreteRuleSteps(userParam.evaluateRuleParam)
+//
+//    // run rules
+//    dqEngines.runRuleSteps(ruleSteps)
+//
+//    // persist results
+//    dqEngines.persistAllResults(ruleSteps, persist)
+
+    // end time
+//    val endTime = new Date().getTime
+//    persist.log(endTime, s"process using time: ${endTime - startTime} ms")
+
+    val processInterval = TimeUtil.milliseconds(sparkParam.processInterval) match {
+      case Some(interval) => interval
+      case _ => throw new Exception("invalid batch interval")
+    }
+    val process = TimingProcess(processInterval, dqThread)
+    process.startup()
+
+    ssc.start()
+    ssc.awaitTermination()
+    ssc.stop(stopSparkContext=true, stopGracefully=true)
+
+    // finish
+    persist.finish()
+
+//    process.shutdown()
+  }
+
+  def end: Try[_] = Try {
+    sparkContext.stop
+
+    InfoCacheInstance.close
+  }
+
+  def createStreamingContext: StreamingContext = {
+    val batchInterval = TimeUtil.milliseconds(sparkParam.batchInterval) match {
+      case Some(interval) => Milliseconds(interval)
+      case _ => throw new Exception("invalid batch interval")
+    }
+    val ssc = new StreamingContext(sparkContext, batchInterval)
+    ssc.checkpoint(sparkParam.cpDir)
+
+
+
+    ssc
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/StreamingDqThread.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/StreamingDqThread.scala b/measure/src/main/scala/org/apache/griffin/measure/process/StreamingDqThread.scala
new file mode 100644
index 0000000..df1cc1b
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/StreamingDqThread.scala
@@ -0,0 +1,185 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import java.util.Date
+import java.util.concurrent.TimeUnit
+
+import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
+import org.apache.griffin.measure.cache.result.CacheResultProcesser
+import org.apache.griffin.measure.config.params.user.EvaluateRuleParam
+import org.apache.griffin.measure.data.source.DataSource
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.persist.{Persist, PersistFactory}
+import org.apache.griffin.measure.process.engine.DqEngines
+import org.apache.griffin.measure.rule.adaptor.{RuleAdaptorGroup, RunPhase}
+
+case class StreamingDqThread(dqEngines: DqEngines,
+                             dataSources: Seq[DataSource],
+                             evaluateRuleParam: EvaluateRuleParam,
+                             persistFactory: PersistFactory,
+                             appPersist: Persist
+                            ) extends Runnable with Loggable {
+
+  val lock = InfoCacheInstance.genLock("process")
+
+  def run(): Unit = {
+    val updateTimeDate = new Date()
+    val updateTime = updateTimeDate.getTime
+    println(s"===== [${updateTimeDate}] process begins =====")
+    val locked = lock.lock(5, TimeUnit.SECONDS)
+    if (locked) {
+      try {
+
+        val st = new Date().getTime
+        appPersist.log(st, s"starting process ...")
+
+        TimeInfoCache.startTimeInfoCache
+
+        // init data sources
+        dqEngines.loadData(dataSources, st)
+
+        // generate rule steps
+        val ruleSteps = RuleAdaptorGroup.genConcreteRuleSteps(evaluateRuleParam, RunPhase)
+
+        // run rules
+        dqEngines.runRuleSteps(ruleSteps)
+
+        val ct = new Date().getTime
+        val calculationTimeStr = s"calculation using time: ${ct - st} ms"
+        println(calculationTimeStr)
+        appPersist.log(ct, calculationTimeStr)
+
+        // persist results
+        val timeGroups = dqEngines.persistAllMetrics(ruleSteps, persistFactory)
+
+        val rt = new Date().getTime
+        val persistResultTimeStr = s"persist result using time: ${rt - ct} ms"
+        println(persistResultTimeStr)
+        appPersist.log(rt, persistResultTimeStr)
+
+        val rdds = dqEngines.collectUpdateRDDs(ruleSteps, timeGroups)
+        rdds.foreach(_._2.cache())
+        rdds.foreach { pr =>
+          val (step, rdd) = pr
+          val cnt = rdd.count
+          println(s"step [${step.name}] group count: ${cnt}")
+        }
+
+        val lt = new Date().getTime
+        val collectRddTimeStr = s"collect records using time: ${lt - rt} ms"
+        println(collectRddTimeStr)
+        appPersist.log(lt, collectRddTimeStr)
+
+        // persist records
+        dqEngines.persistAllRecords(rdds, persistFactory)
+//        dqEngines.persistAllRecords(ruleSteps, persistFactory, timeGroups)
+
+        // update data source
+        dqEngines.updateDataSources(rdds, dataSources)
+//        dqEngines.updateDataSources(ruleSteps, dataSources, timeGroups)
+
+        rdds.foreach(_._2.unpersist())
+
+        TimeInfoCache.endTimeInfoCache
+
+        // clean old data
+        cleanData
+
+        val et = new Date().getTime
+        val persistTimeStr = s"persist records using time: ${et - lt} ms"
+        println(persistTimeStr)
+        appPersist.log(et, persistTimeStr)
+
+      } catch {
+        case e: Throwable => error(s"process error: ${e.getMessage}")
+      } finally {
+        lock.unlock()
+      }
+    } else {
+      println(s"===== [${updateTimeDate}] process ignores =====")
+    }
+    val endTime = new Date().getTime
+    println(s"===== [${updateTimeDate}] process ends, using ${endTime - updateTime} ms =====")
+  }
+
+  // clean old data and old result cache
+  private def cleanData(): Unit = {
+    try {
+      dataSources.foreach(_.cleanOldData)
+      dataSources.foreach(_.dropTable)
+
+      val cleanTime = TimeInfoCache.getCleanTime
+      CacheResultProcesser.refresh(cleanTime)
+    } catch {
+      case e: Throwable => error(s"clean data error: ${e.getMessage}")
+    }
+  }
+
+//  // calculate accuracy between source data and target data
+//  private def accuracy(sourceData: RDD[(Product, (Map[String, Any], Map[String, Any]))],
+//               targetData: RDD[(Product, (Map[String, Any], Map[String, Any]))],
+//               ruleAnalyzer: RuleAnalyzer) = {
+//    // 1. cogroup
+//    val allKvs = sourceData.cogroup(targetData)
+//
+//    // 2. accuracy calculation
+//    val (accuResult, missingRdd, matchedRdd) = AccuracyCore.accuracy(allKvs, ruleAnalyzer)
+//
+//    (accuResult, missingRdd, matchedRdd)
+//  }
+//
+//  private def reorgByTimeGroup(rdd: RDD[(Product, (Map[String, Any], Map[String, Any]))]
+//                      ): RDD[(Long, (Product, (Map[String, Any], Map[String, Any])))] = {
+//    rdd.flatMap { row =>
+//      val (key, (value, info)) = row
+//      val b: Option[(Long, (Product, (Map[String, Any], Map[String, Any])))] = info.get(TimeStampInfo.key) match {
+//        case Some(t: Long) => Some((t, row))
+//        case _ => None
+//      }
+//      b
+//    }
+//  }
+//
+//  // convert data into a string
+//  def record2String(rec: (Product, (Map[String, Any], Map[String, Any])), dataPersist: Iterable[Expr], infoPersist: Iterable[Expr]): String = {
+//    val (key, (data, info)) = rec
+//    val persistData = getPersistMap(data, dataPersist)
+//    val persistInfo = info.mapValues { value =>
+//      value match {
+//        case vd: Map[String, Any] => getPersistMap(vd, infoPersist)
+//        case v => v
+//      }
+//    }.map(identity)
+//    s"${persistData} [${persistInfo}]"
+//  }
+//
+//  // get the expr value map of the persist expressions
+//  private def getPersistMap(data: Map[String, Any], persist: Iterable[Expr]): Map[String, Any] = {
+//    val persistMap = persist.map(e => (e._id, e.desc)).toMap
+//    data.flatMap { pair =>
+//      val (k, v) = pair
+//      persistMap.get(k) match {
+//        case Some(d) => Some((d -> v))
+//        case _ => None
+//      }
+//    }
+//  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/TimingProcess.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/TimingProcess.scala b/measure/src/main/scala/org/apache/griffin/measure/process/TimingProcess.scala
new file mode 100644
index 0000000..8d9bcb2
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/TimingProcess.scala
@@ -0,0 +1,46 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process
+
+import java.util.concurrent.{Executors, ThreadPoolExecutor, TimeUnit}
+import java.util.{Timer, TimerTask}
+
+case class TimingProcess(interval: Long, runnable: Runnable) {
+
+  val pool: ThreadPoolExecutor = Executors.newFixedThreadPool(5).asInstanceOf[ThreadPoolExecutor]
+
+  val timer = new Timer("process", true)
+
+  val timerTask = new TimerTask() {
+    override def run(): Unit = {
+      pool.submit(runnable)
+    }
+  }
+
+  def startup(): Unit = {
+    timer.schedule(timerTask, interval, interval)
+  }
+
+  def shutdown(): Unit = {
+    timer.cancel()
+    pool.shutdown()
+    pool.awaitTermination(10, TimeUnit.SECONDS)
+  }
+
+}
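
For reference, a minimal driver for the timer wrapper above (the Runnable body and intervals are illustrative only):

    import org.apache.griffin.measure.process.TimingProcess

    object TimingProcessExample extends App {
      val task = new Runnable {
        def run(): Unit = println(s"tick at ${new java.util.Date()}")
      }
      val process = TimingProcess(5000L, task)   // submit the task every 5 seconds
      process.startup()
      Thread.sleep(16000L)                       // let it fire a few times
      process.shutdown()                         // cancel the timer, drain the pool
    }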

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/check/DataChecker.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/check/DataChecker.scala b/measure/src/main/scala/org/apache/griffin/measure/process/check/DataChecker.scala
new file mode 100644
index 0000000..91855c2
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/check/DataChecker.scala
@@ -0,0 +1,29 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process.check
+
+import org.apache.spark.sql.SQLContext
+
+case class DataChecker(sqlContext: SQLContext) {
+
+  def existDataSourceName(name: String): Boolean = {
+    sqlContext.tableNames.exists(_ == name)
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/engine/DataFrameOprEngine.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/engine/DataFrameOprEngine.scala b/measure/src/main/scala/org/apache/griffin/measure/process/engine/DataFrameOprEngine.scala
new file mode 100644
index 0000000..b409b8d
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/engine/DataFrameOprEngine.scala
@@ -0,0 +1,165 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process.engine
+
+import java.util.Date
+
+import org.apache.griffin.measure.cache.result.CacheResultProcesser
+import org.apache.griffin.measure.config.params.user.DataSourceParam
+import org.apache.griffin.measure.data.connector.GroupByColumn
+import org.apache.griffin.measure.data.source.{DataSource, DataSourceFactory}
+import org.apache.griffin.measure.persist.{Persist, PersistFactory}
+import org.apache.griffin.measure.result.AccuracyResult
+import org.apache.griffin.measure.rule.dsl._
+import org.apache.griffin.measure.rule.step._
+import org.apache.griffin.measure.utils.JsonUtil
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}
+import org.apache.spark.sql.{DataFrame, Row, SQLContext}
+import org.apache.spark.streaming.StreamingContext
+
+case class DataFrameOprEngine(sqlContext: SQLContext) extends SparkDqEngine {
+
+  def runRuleStep(ruleStep: ConcreteRuleStep): Boolean = {
+    ruleStep match {
+      case DfOprStep(name, rule, details, _, _) => {
+        try {
+          rule match {
+            case DataFrameOprs._fromJson => {
+              val df = DataFrameOprs.fromJson(sqlContext, details)
+              df.registerTempTable(name)
+            }
+            case DataFrameOprs._accuracy => {
+              val df = DataFrameOprs.accuracy(sqlContext, details)
+              df.registerTempTable(name)
+            }
+            case DataFrameOprs._clear => {
+              val df = DataFrameOprs.clear(sqlContext, details)
+              df.registerTempTable(name)
+            }
+            case _ => {
+              throw new Exception(s"df opr [ ${rule} ] not supported")
+            }
+          }
+          true
+        } catch {
+          case e: Throwable => {
+            error(s"run df opr [ ${rule} ] error: ${e.getMessage}")
+            false
+          }
+        }
+      }
+      case _ => false
+    }
+  }
+
+}
+
+object DataFrameOprs {
+
+  final val _fromJson = "from_json"
+  final val _accuracy = "accuracy"
+  final val _clear = "clear"
+
+  def fromJson(sqlContext: SQLContext, details: Map[String, Any]): DataFrame = {
+    val _dfName = "df.name"
+    val _colName = "col.name"
+    val dfName = details.getOrElse(_dfName, "").toString
+    val colNameOpt = details.get(_colName).map(_.toString)
+
+    val df = sqlContext.table(s"`${dfName}`")
+    val rdd = colNameOpt match {
+      case Some(colName: String) => df.map(_.getAs[String](colName))
+      case _ => df.map(_.getAs[String](0))
+    }
+    sqlContext.read.json(rdd)
+  }
+
+  def accuracy(sqlContext: SQLContext, details: Map[String, Any]): DataFrame = {
+    val _dfName = "df.name"
+    val _miss = "miss"
+    val _total = "total"
+    val _matched = "matched"
+    val _tmst = "tmst"
+    val dfName = details.getOrElse(_dfName, _dfName).toString
+    val miss = details.getOrElse(_miss, _miss).toString
+    val total = details.getOrElse(_total, _total).toString
+    val matched = details.getOrElse(_matched, _matched).toString
+    val tmst = details.getOrElse(_tmst, _tmst).toString
+
+    val updateTime = new Date().getTime
+
+    def getLong(r: Row, k: String): Long = {
+      try {
+        r.getAs[Long](k)
+      } catch {
+        case e: Throwable => 0L
+      }
+    }
+
+    val df = sqlContext.table(s"`${dfName}`")
+    val results = df.flatMap { row =>
+      val t = getLong(row, tmst)
+      if (t > 0) {
+        val missCount = getLong(row, miss)
+        val totalCount = getLong(row, total)
+        val ar = AccuracyResult(missCount, totalCount)
+        Some((t, ar))
+      } else None
+    }.collect
+
+    val updateResults = results.flatMap { pair =>
+      val (t, result) = pair
+      val updatedCacheResultOpt = CacheResultProcesser.genUpdateCacheResult(t, updateTime, result)
+      updatedCacheResultOpt
+    }
+
+    // update
+    updateResults.foreach { r =>
+      CacheResultProcesser.update(r)
+    }
+
+    val schema = StructType(Array(
+      StructField(tmst, LongType),
+      StructField(miss, LongType),
+      StructField(total, LongType),
+      StructField(matched, LongType)
+    ))
+    val rows = updateResults.map { r =>
+      val ar = r.result.asInstanceOf[AccuracyResult]
+      Row(r.timeGroup, ar.miss, ar.total, ar.getMatch)
+    }
+    val rowRdd = sqlContext.sparkContext.parallelize(rows)
+    sqlContext.createDataFrame(rowRdd, schema)
+
+  }
+
+  def clear(sqlContext: SQLContext, details: Map[String, Any]): DataFrame = {
+    val _dfName = "df.name"
+    val dfName = details.getOrElse(_dfName, "").toString
+
+    val df = sqlContext.table(s"`${dfName}`")
+    val emptyRdd = sqlContext.sparkContext.emptyRDD[Row]
+    sqlContext.createDataFrame(emptyRdd, df.schema)
+  }
+
+}
+
+
+
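
For orientation, a minimal sketch of the details map the from_json operation above reads (the table and column names are placeholders, not from the commit):

    // details for a DfOprStep whose rule is "from_json":
    // read the string column "value" of the registered temp table "kafka_source"
    // and parse each row as JSON via sqlContext.read.json
    val fromJsonDetails: Map[String, Any] = Map(
      "df.name"  -> "kafka_source",   // source temp table holding raw JSON strings
      "col.name" -> "value"           // optional; defaults to the first column
    )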

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngine.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngine.scala b/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngine.scala
new file mode 100644
index 0000000..84d5917
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngine.scala
@@ -0,0 +1,41 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process.engine
+
+import org.apache.griffin.measure.config.params.user.DataSourceParam
+import org.apache.griffin.measure.data.source.DataSource
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.persist.{Persist, PersistFactory}
+import org.apache.griffin.measure.rule.dsl._
+import org.apache.griffin.measure.rule.step._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
+
+trait DqEngine extends Loggable with Serializable {
+
+  def runRuleStep(ruleStep: ConcreteRuleStep): Boolean
+
+  def collectMetrics(ruleStep: ConcreteRuleStep): Map[Long, Map[String, Any]]
+
+//  def collectRecords(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]): Option[RDD[(Long, Iterable[String])]]
+//
+//  def collectUpdateCacheDatas(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]): Option[RDD[(Long, Iterable[String])]]
+
+  def collectUpdateRDD(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]): Option[RDD[(Long, Iterable[String])]]
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngineFactory.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngineFactory.scala b/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngineFactory.scala
new file mode 100644
index 0000000..e075584
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngineFactory.scala
@@ -0,0 +1,47 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process.engine
+
+import org.apache.spark.sql.SQLContext
+import org.apache.spark.streaming.StreamingContext
+
+
+object DqEngineFactory {
+
+  private val engineTypes = List("spark-sql", "df-opr")
+
+  private final val SparkSqlEngineType = "spark-sql"
+  private final val DataFrameOprEngineType = "df-opr"
+
+  def genDqEngines(sqlContext: SQLContext): DqEngines = {
+    val engines = engineTypes.flatMap { et =>
+      genDqEngine(et, sqlContext)
+    }
+    DqEngines(engines)
+  }
+
+  private def genDqEngine(engineType: String, sqlContext: SQLContext): Option[DqEngine] = {
+    engineType match {
+      case SparkSqlEngineType => Some(SparkSqlEngine(sqlContext))
+      case DataFrameOprEngineType => Some(DataFrameOprEngine(sqlContext))
+      case _ => None
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngines.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngines.scala b/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngines.scala
new file mode 100644
index 0000000..1bafa15
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/engine/DqEngines.scala
@@ -0,0 +1,208 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process.engine
+
+import org.apache.griffin.measure.config.params.user.DataSourceParam
+import org.apache.griffin.measure.data.connector.GroupByColumn
+import org.apache.griffin.measure.data.source._
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.persist.{Persist, PersistFactory}
+import org.apache.griffin.measure.rule.dsl._
+import org.apache.griffin.measure.rule.step._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
+
+case class DqEngines(engines: Seq[DqEngine]) extends DqEngine {
+
+  val persistOrder: List[PersistType] = List(MetricPersistType, RecordPersistType)
+
+  def loadData(dataSources: Seq[DataSource], ms: Long): Unit = {
+    dataSources.foreach { ds =>
+      ds.loadData(ms)
+    }
+  }
+
+  def runRuleSteps(ruleSteps: Seq[ConcreteRuleStep]): Unit = {
+    ruleSteps.foreach { ruleStep =>
+      runRuleStep(ruleStep)
+    }
+  }
+
+  def persistAllMetrics(ruleSteps: Seq[ConcreteRuleStep], persistFactory: PersistFactory
+                       ): Iterable[Long] = {
+    val metricSteps = ruleSteps.filter(_.persistType == MetricPersistType)
+    val allMetrics: Map[Long, Map[String, Any]] = {
+      metricSteps.foldLeft(Map[Long, Map[String, Any]]()) { (ret, step) =>
+        val metrics = collectMetrics(step)
+        metrics.foldLeft(ret) { (total, pair) =>
+          val (k, v) = pair
+          total.get(k) match {
+            case Some(map) => total + (k -> (map ++ v))
+            case _ => total + pair
+          }
+        }
+      }
+    }
+    val updateTimeGroups = allMetrics.keys
+    allMetrics.foreach { pair =>
+      val (t, metric) = pair
+      val persist = persistFactory.getPersists(t)
+      persist.persistMetrics(metric)
+    }
+    updateTimeGroups
+  }
+
+//  def persistAllRecords(ruleSteps: Seq[ConcreteRuleStep], persistFactory: PersistFactory,
+//                        timeGroups: Iterable[Long]): Unit = {
+//    val recordSteps = ruleSteps.filter(_.persistType == RecordPersistType)
+//    recordSteps.foreach { step =>
+//      collectRecords(step, timeGroups) match {
+//        case Some(rdd) => {
+//          val name = step.name
+//          rdd.foreach { pair =>
+//            val (t, items) = pair
+//            val persist = persistFactory.getPersists(t)
+//            persist.persistRecords(items, name)
+//          }
+//        }
+//        case _ => {
+//          println(s"empty records to persist")
+//        }
+//      }
+//    }
+//  }
+//
+//  def updateDataSources(ruleSteps: Seq[ConcreteRuleStep], dataSources: Seq[DataSource],
+//                        timeGroups: Iterable[Long]): Unit = {
+//    val updateSteps = ruleSteps.filter(_.updateDataSource.nonEmpty)
+//    updateSteps.foreach { step =>
+//      collectUpdateCacheDatas(step, timeGroups) match {
+//        case Some(rdd) => {
+//          val udpateDataSources = dataSources.filter { ds =>
+//            step.updateDataSource match {
+//              case Some(dsName) if (dsName == ds.name) => true
+//              case _ => false
+//            }
+//          }
+//          if (udpateDataSources.size > 0) {
+//            val name = step.name
+//            rdd.foreach { pair =>
+//              val (t, items) = pair
+//              udpateDataSources.foreach { ds =>
+//                ds.dataSourceCacheOpt.foreach(_.updateData(items, t))
+//              }
+//            }
+//          }
+//        }
+//        case _ => {
+//          println(s"empty data source to update")
+//        }
+//      }
+//    }
+//  }
+
+  ///////////////////////////
+
+  def runRuleStep(ruleStep: ConcreteRuleStep): Boolean = {
+    val ret = engines.foldLeft(false) { (done, engine) =>
+      done || engine.runRuleStep(ruleStep)
+    }
+    if (!ret) warn(s"run rule step warn: no dq engine supports ${ruleStep}")
+    ret
+  }
+
+  ///////////////////////////
+
+//  def collectRecords(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]): Option[RDD[(Long, Iterable[String])]] = {
+//    engines.flatMap { engine =>
+//      engine.collectRecords(ruleStep, timeGroups)
+//    }.headOption
+//  }
+//  def collectUpdateCacheDatas(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]): Option[RDD[(Long, Iterable[String])]] = {
+//    engines.flatMap { engine =>
+//      engine.collectUpdateCacheDatas(ruleStep, timeGroups)
+//    }.headOption
+//  }
+  def collectMetrics(ruleStep: ConcreteRuleStep): Map[Long, Map[String, Any]] = {
+    val ret = engines.foldLeft(Map[Long, Map[String, Any]]()) { (ret, engine) =>
+      ret ++ engine.collectMetrics(ruleStep)
+    }
+//    if (ret.isEmpty) warn(s"collect metrics warn: no metrics collected for ${ruleStep}")
+    ret
+  }
+
+  def collectUpdateRDD(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]
+                      ): Option[RDD[(Long, Iterable[String])]] = {
+    engines.flatMap { engine =>
+      engine.collectUpdateRDD(ruleStep, timeGroups)
+    }.headOption
+  }
+
+  ////////////////////////////
+
+  def collectUpdateRDDs(ruleSteps: Seq[ConcreteRuleStep], timeGroups: Iterable[Long]
+                       ): Seq[(ConcreteRuleStep, RDD[(Long, Iterable[String])])] = {
+    ruleSteps.flatMap { rs =>
+      collectUpdateRDD(rs, timeGroups) match {
+        case Some(rdd) => Some((rs, rdd))
+        case _ => None
+      }
+    }
+  }
+
+  def persistAllRecords(stepRdds: Seq[(ConcreteRuleStep, RDD[(Long, Iterable[String])])],
+                        persistFactory: PersistFactory): Unit = {
+    stepRdds.foreach { stepRdd =>
+      val (step, rdd) = stepRdd
+      if (step.persistType == RecordPersistType) {
+        val name = step.name
+        rdd.foreach { pair =>
+          val (t, items) = pair
+          val persist = persistFactory.getPersists(t)
+          persist.persistRecords(items, name)
+        }
+      }
+    }
+  }
+
+  def updateDataSources(stepRdds: Seq[(ConcreteRuleStep, RDD[(Long, Iterable[String])])],
+                        dataSources: Seq[DataSource]): Unit = {
+    stepRdds.foreach { stepRdd =>
+      val (step, rdd) = stepRdd
+      if (step.updateDataSource.nonEmpty) {
+        val updateDataSources = dataSources.filter { ds =>
+          step.updateDataSource match {
+            case Some(dsName) if (dsName == ds.name) => true
+            case _ => false
+          }
+        }
+        if (updateDataSources.size > 0) {
+          val name = step.name
+          rdd.foreach { pair =>
+            val (t, items) = pair
+            updateDataSources.foreach { ds =>
+              ds.dataSourceCacheOpt.foreach(_.updateData(items, t))
+            }
+          }
+        }
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkDqEngine.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkDqEngine.scala b/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkDqEngine.scala
new file mode 100644
index 0000000..ee994fd
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkDqEngine.scala
@@ -0,0 +1,167 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process.engine
+
+import org.apache.griffin.measure.data.connector.GroupByColumn
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.rule.dsl.{MetricPersistType, RecordPersistType}
+import org.apache.griffin.measure.rule.step._
+import org.apache.griffin.measure.utils.JsonUtil
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.{DataFrame, SQLContext}
+
+trait SparkDqEngine extends DqEngine {
+
+  val sqlContext: SQLContext
+
+  def collectMetrics(ruleStep: ConcreteRuleStep): Map[Long, Map[String, Any]] = {
+    val emptyMap = Map[String, Any]()
+    ruleStep match {
+      case step: ConcreteRuleStep if (step.persistType == MetricPersistType) => {
+        val name = step.name
+        try {
+          val pdf = sqlContext.table(s"`${name}`")
+          val records = pdf.toJSON.collect()
+
+          val pairs = records.flatMap { rec =>
+            try {
+              val value = JsonUtil.toAnyMap(rec)
+              value.get(GroupByColumn.tmst) match {
+                case Some(t) => {
+                  val key = t.toString.toLong
+                  Some((key, value))
+                }
+                case _ => None
+              }
+            } catch {
+              case e: Throwable => None
+            }
+          }
+          val groupedPairs = pairs.foldLeft(Map[Long, Seq[Map[String, Any]]]()) { (ret, pair) =>
+            val (k, v) = pair
+            ret.get(k) match {
+              case Some(seq) => ret + (k -> (seq :+ v))
+              case _ => ret + (k -> (v :: Nil))
+            }
+          }
+          groupedPairs.mapValues { vs =>
+            if (vs.size > 1) {
+              Map[String, Any]((name -> vs))
+            } else {
+              vs.headOption.getOrElse(emptyMap)
+            }
+          }
+        } catch {
+          case e: Throwable => {
+            error(s"collect metrics ${name} error: ${e.getMessage}")
+            Map[Long, Map[String, Any]]()
+          }
+        }
+      }
+      case _ => Map[Long, Map[String, Any]]()
+    }
+  }
+
+  def collectUpdateRDD(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]
+                      ): Option[RDD[(Long, Iterable[String])]] = {
+    ruleStep match {
+      case step: ConcreteRuleStep if ((step.persistType == RecordPersistType)
+        || (step.updateDataSource.nonEmpty)) => {
+        val name = step.name
+        try {
+          val pdf = sqlContext.table(s"`${name}`")
+          val cols = pdf.columns
+          val rdd = pdf.flatMap { row =>
+            val values = cols.flatMap { col =>
+              Some((col, row.getAs[Any](col)))
+            }.toMap
+            values.get(GroupByColumn.tmst) match {
+              case Some(t: Long) if (timeGroups.exists(_ == t)) => Some((t, JsonUtil.toJson(values)))
+              case _ => None
+            }
+          }.groupByKey()
+          Some(rdd)
+        } catch {
+          case e: Throwable => {
+            error(s"collect records ${name} error: ${e.getMessage}")
+            None
+          }
+        }
+      }
+      case _ => None
+    }
+  }
+
+//  def collectRecords(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]): Option[RDD[(Long, Iterable[String])]] = {
+//    ruleStep match {
+//      case step: ConcreteRuleStep if (step.persistType == RecordPersistType) => {
+//        val name = step.name
+//        try {
+//          val pdf = sqlContext.table(s"`${name}`")
+//          val cols = pdf.columns
+//          val rdd = pdf.flatMap { row =>
+//            val values = cols.flatMap { col =>
+//              Some((col, row.getAs[Any](col)))
+//            }.toMap
+//            values.get(GroupByColumn.tmst) match {
+//              case Some(t: Long) if (timeGroups.exists(_ == t)) => Some((t, JsonUtil.toJson(values)))
+//              case _ => None
+//            }
+//          }.groupByKey()
+//          Some(rdd)
+//        } catch {
+//          case e: Throwable => {
+//            error(s"collect records ${name} error: ${e.getMessage}")
+//            None
+//          }
+//        }
+//      }
+//      case _ => None
+//    }
+//  }
+//
+//  def collectUpdateCacheDatas(ruleStep: ConcreteRuleStep, timeGroups: Iterable[Long]): Option[RDD[(Long, Iterable[String])]] = {
+//    ruleStep match {
+//      case step: ConcreteRuleStep if (step.updateDataSource.nonEmpty) => {
+//        val name = step.name
+//        try {
+//          val pdf = sqlContext.table(s"`${name}`")
+//          val cols = pdf.columns
+//          val rdd = pdf.flatMap { row =>
+//            val values = cols.flatMap { col =>
+//              Some((col, row.getAs[Any](col)))
+//            }.toMap
+//            values.get(GroupByColumn.tmst) match {
+//              case Some(t: Long) if (timeGroups.exists(_ == t)) => Some((t, JsonUtil.toJson(values)))
+//              case _ => None
+//            }
+//          }.groupByKey()
+//          Some(rdd)
+//        } catch {
+//          case e: Throwable => {
+//            error(s"collect update cache datas ${name} error: ${e.getMessage}")
+//            None
+//          }
+//        }
+//      }
+//      case _ => None
+//    }
+//  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkRowFormatter.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkRowFormatter.scala b/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkRowFormatter.scala
new file mode 100644
index 0000000..6ed0559
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkRowFormatter.scala
@@ -0,0 +1,62 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process.engine
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.types.{ArrayType, DataType, StructField, StructType}
+
+import scala.collection.mutable.ArrayBuffer
+
+object SparkRowFormatter {
+
+  def formatRow(row: Row): Map[String, Any] = {
+    formatRowWithSchema(row, row.schema)
+  }
+
+  private def formatRowWithSchema(row: Row, schema: StructType): Map[String, Any] = {
+    formatStruct(schema.fields, row)
+  }
+
+  private def formatStruct(schema: Seq[StructField], r: Row) = {
+    val paired = schema.zip(r.toSeq)
+    paired.foldLeft(Map[String, Any]())((s, p) => s ++ formatItem(p))
+  }
+
+  private def formatItem(p: (StructField, Any)): Map[String, Any] = {
+    p match {
+      case (sf, a) =>
+        sf.dataType match {
+          case ArrayType(et, _) =>
+            Map(sf.name -> (if (a == null) a else formatArray(et, a.asInstanceOf[ArrayBuffer[Any]])))
+          case StructType(s) =>
+            Map(sf.name -> (if (a == null) a else formatStruct(s, a.asInstanceOf[Row])))
+          case _ => Map(sf.name -> a)
+        }
+    }
+  }
+
+  private def formatArray(et: DataType, arr: ArrayBuffer[Any]): Seq[Any] = {
+    et match {
+      case StructType(s) => arr.map(e => formatStruct(s, e.asInstanceOf[Row]))
+      case ArrayType(t, _) =>
+        arr.map(e => formatArray(t, e.asInstanceOf[ArrayBuffer[Any]]))
+      case _ => arr
+    }
+  }
+}
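
A minimal local sketch of formatRow in use (the sample data and master setting are illustrative):

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext
    import org.apache.griffin.measure.process.engine.SparkRowFormatter

    object SparkRowFormatterExample extends App {
      val sc = new SparkContext(new SparkConf().setAppName("fmt-example").setMaster("local[2]"))
      val sqlContext = new SQLContext(sc)
      val df = sqlContext.read.json(sc.parallelize(Seq("""{"name":"a","age":1}""")))
      // each Row is flattened into a Map keyed by field name
      df.collect.foreach(row => println(SparkRowFormatter.formatRow(row)))
      sc.stop()
    }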

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkSqlEngine.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkSqlEngine.scala b/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkSqlEngine.scala
new file mode 100644
index 0000000..15df3b5
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/process/engine/SparkSqlEngine.scala
@@ -0,0 +1,58 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.process.engine
+
+import java.util.Date
+
+import org.apache.griffin.measure.config.params.user.DataSourceParam
+import org.apache.griffin.measure.data.connector.GroupByColumn
+import org.apache.griffin.measure.data.source._
+import org.apache.griffin.measure.persist.{Persist, PersistFactory}
+import org.apache.griffin.measure.rule.dsl._
+import org.apache.griffin.measure.rule.step._
+import org.apache.griffin.measure.utils.JsonUtil
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.{DataFrame, GroupedData, SQLContext}
+import org.apache.spark.streaming.StreamingContext
+
+case class SparkSqlEngine(sqlContext: SQLContext) extends SparkDqEngine {
+
+  // run a spark sql rule step: execute the sql rule and register the result
+  // as a temp table named after the step; returns false on any failure
+  def runRuleStep(ruleStep: ConcreteRuleStep): Boolean = {
+    ruleStep match {
+      case SparkSqlStep(name, rule, _, _, _) => {
+        try {
+          val rdf = sqlContext.sql(rule)
+          rdf.registerTempTable(name)
+          true
+        } catch {
+          case e: Throwable => {
+            error(s"run spark sql [ ${rule} ] error: ${e.getMessage}")
+            false
+          }
+        }
+      }
+      case _ => false
+    }
+  }
+
+}
+
+
+
+
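
To illustrate the step above, a rough sketch of what a successful SparkSqlStep run amounts to (the rule text, step name, and "source" table are made up for this example; only the sqlContext.sql call followed by registerTempTable mirrors the code):

  // assumption: sqlContext is an existing SQLContext and "source" is a registered table
  val stepName = "adult_source"                              // hypothetical step name
  val rule = "SELECT name, age FROM source WHERE age >= 18"  // hypothetical rule sql
  val rdf = sqlContext.sql(rule)     // evaluate the rule as spark sql
  rdf.registerTempTable(stepName)    // later rule steps can now select from adult_source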


[04/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/input.msg
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/input.msg b/measure/src/test/resources/input.msg
new file mode 100644
index 0000000..edb4619
--- /dev/null
+++ b/measure/src/test/resources/input.msg
@@ -0,0 +1 @@
+{"confId":28,"seeds":["{\"url\":\"https://www.amazon.com/bridge-across-time-myths-history/dp/0855000449/ref\\u003dsr_1_1/186-6687480-5099813?ie\\u003dUTF8\\u0026keywords\\u003d0855000449\\u0026qid\\u003d1434663708\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0855000449\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/8479538562/ref\\u003dsr_1_9?ie\\u003dUTF8\\u0026qid\\u003d47073302\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"8479538562\\\",\\\"referencePrice\\\":0.0,\\\"refe
 renceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1619619172/ref\\u003ds9_simh_gw_p63_d3_i3?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d80956L21ZP4Y9DRF74Z5\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d988263899\\u0026pf_rd_i\\u003d891862\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1619619172\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1857751507/ref\\u003ds9_simh_gw_p74_d2_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d81TU9DQLRW1TQM56LWW3\\u0026pf_rd_t\\
 u003d101\\u0026pf_rd_p\\u003d156743226\\u0026pf_rd_i\\u003d729862\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1857751507\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1481714317/ref\\u003ds9_simh_gw_p107_d0_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dKB37RY3JE5HKQ5G4630T\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d331346081\\u0026pf_rd_i\\u003d575468\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1481714317\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"trac
 ker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1403775141/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1403775141\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Letters-Amelia-Diana-Turner-Forte/dp/0533157684/ref\\u003dsr_1_1/184-7208233-4184259?ie\\u003dUTF8\\u0026keywords\\u003d9780533157686\\u0026qid\\u003d1434675492\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0533157684\\\",\\\"referencePrice\\\":0
 .0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1591022320/ref\\u003ds9_simh_gw_p63_d3_i5?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dWVP9OE92HD77NSJXQZIL\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d556903239\\u0026pf_rd_i\\u003d594333\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1591022320\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Medieval-Early-Modern-Times-Janet/dp/9994609912/ref\\u003dsr_1_1/191-6299628-5905209?ie\\u003dUTF8\\u0026keywords\\u003d9789994609918\\u0026qid\\u0
 03d1434432677\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"9994609912\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1493574175/ref\\u003ds9_simh_gw_p74_d2_i6?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d1OZ6A0RNM4S8JJ01245S\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d124388178\\u0026pf_rd_i\\u003d596200\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1493574175\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\
 \\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1598598635/ref\\u003ds9_ri_gw_g201_i7?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d7O715KMM6Y744QTQ4LIU\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d673955261\\u0026pf_rd_i\\u003d649226\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1598598635\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Escape-Tibet-Nick-Gray/dp/095715190X/ref\\u003dsr_1_1/175-7416659-0137139?ie\\u003dUTF8\\u0026keywords\\u003d9780957151901\\u0026qid\\u003d1434508924\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\
 \\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"095715190X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Man-About-House-Definitive-Companion-ebook/dp/B005AKCFXA\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B005AKCFXA\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/112067641X/ref\\u003ds9_ri_gw_g201_i5?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d0SIZV93I9RQ1669EES6L\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d504839570
 \\u0026pf_rd_i\\u003d161781\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"112067641X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Unnatural-Death-Pedigree-Dorothy-Sayers/dp/B005TCM1X8/ref\\u003dsr_1_1/188-6111852-5942804?ie\\u003dUTF8\\u0026keywords\\u003d9780450020988\\u0026qid\\u003d1434450513\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B005TCM1X8\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extr
 Feilds\":[]}","{\"url\":\"https://www.amazon.com/James-Castle-Common-Place-0970425716/dp/B002J813MA/ref\\u003dsr_1_2/176-6368479-5598662?ie\\u003dUTF8\\u0026keywords\\u003d0970425716\\u0026qid\\u003d1434422520\\u0026sr\\u003d8-2\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B002J813MA\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1940516099/ref\\u003ds9_ri_gw_g201_i2?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dR8JQ4JMW6P3SOJL36M9M\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d535308337\\u0026pf_rd_i\\u003d184331\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlM
 etadata\\\":{\\\"retailerProductId\\\":\\\"1940516099\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1240463456/ref\\u003ds9_ri_gw_g201_i3?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dBL66W75USY907YG001QF\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d720488364\\u0026pf_rd_i\\u003d676890\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1240463456\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/CARSON-DELLOSA-BRIDGE-ORANGE-4-5/dp/1932210652/ref\\u003dsr_1_1/186-9
 625969-7817661?ie\\u003dUTF8\\u0026keywords\\u003d1932210652\\u0026qid\\u003d1434486504\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1932210652\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/B00898M8X2/ref\\u003ds9_simh_gw_p74_d2_i6?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d6RTM9ED4621X306QR2Y3\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d205846301\\u0026pf_rd_i\\u003d110484\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B00898M8X2\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPric
 e\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1597549045/ref\\u003ds9_simh_gw_p63_d3_i10?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dOLWI416TK382276FRI8S\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d971991139\\u0026pf_rd_i\\u003d747799\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1597549045\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Betrayal-Empty-Coffin-Novel/dp/1469216868/ref\\u003dsr_1_1/175-7820384-1782744?ie\\u003dUTF8\\u0026keywords\\u003d1469216868\\u0026qid\\u003d1434594698\\u0026sr\\u003d8-1\",\
 "metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1469216868\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/American-Police-Systems-Raymond-Fosdick/dp/B001KUVGPW/ref\\u003dsr_1_1/176-3370848-7400366?ie\\u003dUTF8\\u0026keywords\\u003d0875859097\\u0026qid\\u003d1434597754\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B001KUVGPW\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://
 www.amazon.com/gp/product/B00KDV1514/ref\\u003dsr_1_1?ie\\u003dUTF8\\u0026qid\\u003d68571520\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B00KDV1514\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1599928825/ref\\u003ds9_ri_gw_g201_i7?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d4TLP90H57YN3J9USNZS0\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d206214160\\u0026pf_rd_i\\u003d410077\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1599928825\\\",\\\"referencePrice\\\":0.
 0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Obeah-Bible-L-W-Laurence/dp/1456472992/184-5031071-1689052?ie\\u003dUTF8\\u0026ref_\\u003ds9_simh_gw_p107_d0_i9\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1456472992\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/B00XJOGUL2/ref\\u003ds9_simh_gw_p63_d3_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d6K4IS5NT23VSY8RK09SM\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d971785392\\u0026pf_rd_i\\u003d254059\",\"metadata\":\"{\\\"crawlType\\\":
 \\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"metaCategId\\\":625,\\\"leafCategId\\\":43479,\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B00XJOGUL2\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/0735841594/ref\\u003ds9_simh_gw_p74_d2_i2?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d4S47E7MUXUTX2OAS03M7\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d858738605\\u0026pf_rd_i\\u003d329711\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0735841594\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}
 \",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1401246125/ref\\u003ds9_ri_gw_g201_i1?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dD5ZSN8FGJFRE1P6P06FN\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d986128032\\u0026pf_rd_i\\u003d431901\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1401246125\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/0531233545/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0531233545\\\",\\\"referencePrice\\\":0.0,\\
 \"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Magpies-Psychological-Thriller-Mark-Edwards/dp/1483911896/ref\\u003dsr_1_1/186-3277371-2912266?ie\\u003dUTF8\\u0026keywords\\u003d9781483911892\\u0026qid\\u003d1434440841\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1483911896\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/149936816X/ref\\u003ds9_ri_gw_g201_i9?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dA2G0NAMUI8Y2SLIOG6K1\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d206
 377910\\u0026pf_rd_i\\u003d192380\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"149936816X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/0321993306/ref\\u003ds9_simh_gw_p74_d2_i9?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dP49OLGZR6428DSLYD41K\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d464469450\\u0026pf_rd_i\\u003d713302\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0321993306\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS
 \\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Arcana-Archives-Catalogo-Febbraio-9-Settembre/dp/8860604222/ref\\u003dsr_1_1/191-2294536-5098349?ie\\u003dUTF8\\u0026keywords\\u003d9788860604224\\u0026qid\\u003d1434516786\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"8860604222\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1287198279/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1287198279\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice
 \\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/B005AW9VPI/ref\\u003ds9_simh_gw_p79_d17_i9?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dSH241RMHIXZ0P4OG3QO7\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d946293479\\u0026pf_rd_i\\u003d418397\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B005AW9VPI\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1844651258/ref\\u003dsr_1_2?ie\\u003dUTF8\\u0026qid\\u003d70179092\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",
 \\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1844651258\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1484105176/ref\\u003dsr_1_4?ie\\u003dUTF8\\u0026qid\\u003d40145443\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1484105176\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Monday-Morning-Leadership-Valerie-Sokolosky/dp/097464031X/ref\\u003dsr_1_1/192-6857494-6582456?ie\\u00
 3dUTF8\\u0026keywords\\u003d9780974640310\\u0026qid\\u003d1434634974\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"097464031X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1288670427/ref\\u003ds9_simh_gw_p79_d17_i5?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dEF7S9AY28SM61TL6P5XR\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d377137900\\u0026pf_rd_i\\u003d382960\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1288670427\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\
 ":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/B00K8UNGGW/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B00K8UNGGW\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Watcher-Another-World-J-Wilson/dp/187167672X/ref\\u003dsr_1_1/179-3023112-0477816?ie\\u003dUTF8\\u0026keywords\\u003d9781871676723\\u0026qid\\u003d1434542107\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"187167672X\\\",\\\"referencePrice\\\
 ":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Mad-Black-Lady-Wanda-Coleman/dp/0876854129/ref\\u003dsr_1_1/182-7097983-9105503?ie\\u003dUTF8\\u0026keywords\\u003d0876854129\\u0026qid\\u003d1434701480\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0876854129\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Every-Word-Fist-Amelia-Garcia/dp/146620818X\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerPro
 ductId\\\":\\\"146620818X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/B00873B0CO/ref\\u003ds9_simh_gw_p74_d2_i5?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d0XY59A109S39P6ID1N23\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d295494213\\u0026pf_rd_i\\u003d478561\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B00873B0CO\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Springboard-Discovery-Mary-Lou-Lacy/dp/0804235953\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRO
 DUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0804235953\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Aventures-extraordinaires-dAdÃ%C2%83Â%C2%83Â%C2%83Ã%C2%83Â%C2%82Â%C2%83Ã%C2%83Â%C2%82Â%C2%83Ã%C2%83Â%C2%83Â%C2%83Ã%C2%83Â%C2%82Â%C2%82Ã%C2%83Â%C2%82Â%C2%83Ã%C2%83Â%C2%83Â%C2%83Ã%C2%83Â%C2%82Â%C2%83Ã%C2%83Â%C2%82Â%C2%85Ã%C2%83Â%C2%83Â%C2%83Ã%C2%83Â%C2%82Â%C2%82Ã%C2%83Â%C2%83Â%C2%82Ã%C2%83Â%C2%82Ã%C2%82¡le-Blanc-Sec/dp/229032096X\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\"
 :\\\"229032096X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1480512796/ref\\u003ds9_ri_gw_g201_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dK4N7A64LBYP7TXSR9W49\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d681535512\\u0026pf_rd_i\\u003d230294\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1480512796\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/crise-conscience-europeenne-1680-1715-French/dp/221300613X/ref\\u003dsr_1_1/189-4114609-4176061?ie\\u003dUT
 F8\\u0026keywords\\u003d9782213006130\\u0026qid\\u003d1434473448\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"221300613X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1743170246/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1743170246\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1249925037/ref\\u003ds9_simh_gw_p
 63_d3_i2?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d1ZX7BOB71HEJOS512320\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d213504420\\u0026pf_rd_i\\u003d514621\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1249925037\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Lifetime-Volunteer-Frates-Joan-Gilmore/dp/1885596499/ref\\u003dsr_1_1/184-6208647-5211900?ie\\u003dUTF8\\u0026keywords\\u003d1885596499\\u0026qid\\u003d1434664139\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1885596499\\\",\\\"referencePrice\\\
 ":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1507597142/ref\\u003ds9_simh_gw_p107_d0_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dG88Q8FMLD0RS001F6STG\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d192898073\\u0026pf_rd_i\\u003d725640\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1507597142\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/0691614385/ref\\u003dsr_1_7?ie\\u003dUTF8\\u0026qid\\u003d11874892\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\
 \"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0691614385\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/150531089X/ref\\u003dsr_1_8?ie\\u003dUTF8\\u0026qid\\u003d93416624\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"150531089X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1591859581/ref\\u003ds9_simh_gw_p74_d2_i7?pf_rd_m\\
 u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dD89JO942AP0BGI8VHY6Z\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d259933109\\u0026pf_rd_i\\u003d594179\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1591859581\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/B0077D8O60/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B0077D8O60\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"
 \",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/0957401515/ref\\u003ds9_simh_gw_p63_d3_i2?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dP68SFUUGN08H8EL54714\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d948202552\\u0026pf_rd_i\\u003d899110\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0957401515\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/3849554147/ref\\u003ds9_simh_gw_p79_d17_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d1QMUT47S5N1OP87AM79G\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d664853631\\u0026pf_rd_i\\u003d825108\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerNa
 me\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"3849554147\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Regulating-Pesticides-Commission-Natural-Resources/dp/0309029465/ref\\u003dsr_1_1/186-2886194-1044035?ie\\u003dUTF8\\u0026keywords\\u003d0309029465\\u0026qid\\u003d1434700978\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0309029465\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/AutoCAD-2006-Tutorial-First-Fundament
 als/dp/1585032301\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1585032301\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1493162667/ref\\u003ds9_simh_gw_p63_d3_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d5B1N58OK9DGNSXAYO648\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d490117977\\u0026pf_rd_i\\u003d302914\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1493162667\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":14868996020
 00}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1482532700/ref\\u003ds9_simh_gw_p79_d17_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d2A85UB174G4QB67Q6W4E\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d469121724\\u0026pf_rd_i\\u003d652299\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1482532700\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1289164118/ref\\u003dsr_1_2?ie\\u003dUTF8\\u0026qid\\u003d18547913\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\
 \\":{\\\"retailerProductId\\\":\\\"1289164118\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1275841597/ref\\u003dsr_1_6?ie\\u003dUTF8\\u0026qid\\u003d83157364\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1275841597\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/How-Maximize-Your-Profit-maximize/dp/1505906075\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\
 ":{\\\"retailerProductId\\\":\\\"1505906075\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1436679060/ref\\u003ds9_simh_gw_p79_d17_i1?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dJ9SOI248X12J3TS2DL0V\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d526717715\\u0026pf_rd_i\\u003d369502\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1436679060\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Plant-Molecular-Biology-Essential-Techniques/dp/0471972681/ref\\u003dsr_1_1/190-92
 97182-6303650?ie\\u003dUTF8\\u0026keywords\\u003d9780471972686\\u0026qid\\u003d1434705995\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0471972681\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Europe-Exporters-Handbook-Jenner/dp/0871965658/ref\\u003dsr_1_1/189-3435491-9141552?ie\\u003dUTF8\\u0026keywords\\u003d9780871965653\\u0026qid\\u003d1434701852\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0871965658\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\
 \"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1288527004/ref\\u003ds9_simh_gw_p79_d17_i2?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dL676IU8SE79EB28460T4\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d839567868\\u0026pf_rd_i\\u003d952718\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1288527004\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Groups-Practice-Marianne-Schneider-Hardcover/dp/0534367453/ref\\u003dsr_1_1/186-0267917-7112209?ie\\u003dUTF8\\u0026keywords\\u003d0534367453\\u0026qid\\u003d1434879089\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\
 \\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0534367453\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/161034958X/ref\\u003ds9_simh_gw_p79_d17_i3?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dLDIXFU564R4V24ZK6SN2\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d473453365\\u0026pf_rd_i\\u003d771807\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"161034958X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"
 url\":\"https://www.amazon.com/dp/product/B00049QL5U/ref\\u003ds9_simh_gw_p107_d0_i2?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dY564P8EU9SRSEXNYP77G\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d561884715\\u0026pf_rd_i\\u003d312289\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B00049QL5U\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1618852183/ref\\u003ds9_simh_gw_p107_d0_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dKM0Q6544JP28U5676KZF\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d974311606\\u0026pf_rd_i\\u003d652689\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_
 us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1618852183\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Models-Dermatology-Vol/dp/3805547617\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"3805547617\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/0684859726/ref\\u003ds9_simh_gw_p79_d17_i7?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d727ES4S50NPD7M8F4XLO\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d475467204\\u0026
 pf_rd_i\\u003d897737\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0684859726\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1449788386/ref\\u003dsr_1_3?ie\\u003dUTF8\\u0026qid\\u003d34593636\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1449788386\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1622125290/ref\\u0
 03ds9_simh_gw_p107_d0_i7?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d46Q876OOMM3D6XSDG7VM\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d954847296\\u0026pf_rd_i\\u003d374067\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1622125290\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1494559447/ref\\u003ds9_simh_gw_p79_d17_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dW5LSQ3KS109584JUY4A0\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d694179817\\u0026pf_rd_i\\u003d134867\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":
 {\\\"retailerProductId\\\":\\\"1494559447\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Proceedings-1-2-Classical-Association-Wales/dp/1236636929/ref\\u003dsr_1_1/184-1359550-5736018?ie\\u003dUTF8\\u0026keywords\\u003d9781236636928\\u0026qid\\u003d1434649299\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1236636929\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1287003001/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerNa
 me\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1287003001\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1249180309/ref\\u003dsr_1_10?ie\\u003dUTF8\\u0026qid\\u003d32633819\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1249180309\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Michelin-Red-Guide-Spain-Portugal/dp/2060063779/ref\\u003dsr_1_1/179-4403527-2002065?ie\\u003dUTF8\\u0026ke
 ywords\\u003d9782060063775\\u0026qid\\u003d1434621406\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"2060063779\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Homicidal-Aliens-Other-Disappointments-Invasion/dp/1480518522/ref\\u003dsr_1_1/186-9648743-6858653?ie\\u003dUTF8\\u0026keywords\\u003d1480518522\\u0026qid\\u003d1434660937\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1480518522\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\
 \":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/0984635742/ref\\u003ds9_simh_gw_p79_d17_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d21YWJSFV761872B02I7H\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d175584267\\u0026pf_rd_i\\u003d665215\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0984635742\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1583335331/ref\\u003ds9_simh_gw_p74_d2_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dPLKAEVA1UJH99O85N5K2\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d919768077\\u0026pf_rd_i\\u003d142182\",\"metadata\":\"{\\\"cra
 wlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1583335331\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1305256662/ref\\u003ds9_ri_gw_g201_i6?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d8V35F53XPBW54M0TO89S\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d736378767\\u0026pf_rd_i\\u003d803718\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1305256662\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"u
 rl\":\"https://www.amazon.com/Biological-Rhythm-Research-Sollberger/dp/0444405496/ref\\u003dsr_1_1/184-0842412-0577011?ie\\u003dUTF8\\u0026keywords\\u003d0444405496\\u0026qid\\u003d1434660594\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0444405496\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/0826520235/ref\\u003ds9_simh_gw_p79_d17_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d17N0U1085ISNRS396YHI\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d909993468\\u0026pf_rd_i\\u003d167394\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetada
 ta\\\":{\\\"retailerProductId\\\":\\\"0826520235\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1452089175/ref\\u003ds9_simh_gw_p63_d3_i7?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d9S13UGLB1J3M03X087L7\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d683610611\\u0026pf_rd_i\\u003d897880\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1452089175\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1286720451/ref\\u003ds9_simh_gw_p79_d17_i2?pf_rd_m\\u003dATVPDKIKX0
 DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dE82MQGB807FFVWB3Q44R\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d104104751\\u0026pf_rd_i\\u003d243022\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1286720451\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1499041055/ref\\u003ds9_ri_gw_g201_i9?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dB5S17N4WLLSPDD34YDY8\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d872947608\\u0026pf_rd_i\\u003d644957\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1499041055\\\",\\\"refe
 rencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1288769598/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1288769598\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Encyclopaedia-United-States-Spacecraft-Bison/dp/0600500519/ref\\u003dsr_1_1/189-9370660-2213859?ie\\u003dUTF8\\u0026keywords\\u003d9780600500513\\u0026qid\\u003d1434620291\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productC
 rawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0600500519\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/ANTONIO-VIVALDI-Garland-reference-humanities/dp/0824083865\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0824083865\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1465417117/ref\\u003dsr_1_4?ie\\u003dUTF8\\u0026qid\\u003d16061560\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US
 \\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1465417117\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/0813224535/ref\\u003ds9_simh_gw_p74_d2_i6?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dSHQ8F60N372GY56JWF09\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d412949045\\u0026pf_rd_i\\u003d425983\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0813224535\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1495421996/ref\\u003ds9_ri_gw_g2
 01_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dX9D65SVNFOVU7N8SWF3O\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d165045927\\u0026pf_rd_i\\u003d372797\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1495421996\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/3640223098/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"3640223098\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"lin
 kSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Soldier-Official-Guide-Accompany-ITV/dp/B0041CQMJG\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B0041CQMJG\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1480031003/ref\\u003ds9_simh_gw_p63_d3_i10?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d0RSF4Y1580S2V22H2JSG\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d328419621\\u0026pf_rd_i\\u003d991785\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1480031003\\\",\\\"referencePrice\\\":0.0,\\\"referenceSh
 ippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/My-Harvest-Home-Celebration-Customs/dp/0937203688/ref\\u003dsr_1_1/185-2501167-9365558?ie\\u003dUTF8\\u0026keywords\\u003d9780937203682\\u0026qid\\u003d1434501536\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0937203688\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Halfway-Decent-Ernie-Hudson/dp/B001A4YO20\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\
 \"B001A4YO20\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1288916906/ref\\u003ds9_simh_gw_p74_d2_i6?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dONRG59HTI83U545RG01Y\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d669707968\\u0026pf_rd_i\\u003d353895\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1288916906\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1288725108/ref\\u003dsr_1_5?ie\\u003dUTF8\\u0026qid\\u003d97736076\\u0026sr\\u003d8-1\\u0026keywords\\u
 003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1288725108\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1462067514/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1462067514\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1290713707/ref\\u003ds9_simh_gw_p107_d0_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\
 \u0026pf_rd_r\\u003dR9AF8E0I0OVS17OZWIL0\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d819156518\\u0026pf_rd_i\\u003d124919\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1290713707\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Het-historische-gedicht-geschiedenis-Vlaanderen/dp/9057592851/ref\\u003dsr_1_1/176-4708134-2081319?ie\\u003dUTF8\\u0026keywords\\u003d9789057592850\\u0026qid\\u003d1434457720\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"9057592851\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors
 \\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1289300151/ref\\u003ds9_simh_gw_p63_d3_i3?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dDW5AQM8981BG14F3OR4U\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d353942470\\u0026pf_rd_i\\u003d642212\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1289300151\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Christines-Picture-Book-Christian-Andersen/dp/0862720893/ref\\u003dsr_1_1/179-3046203-1921448?ie\\u003dUTF8\\u0026keywords\\u003d9780862720896\\u0026qid\\u003d1434426244\\u0026sr\\u003d8-1\",\"metad
 ata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0862720893\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1249461162/ref\\u003ds9_simh_gw_p79_d17_i3?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d57YUBLM3S5JH10JY6SO4\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d826066547\\u0026pf_rd_i\\u003d907699\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1249461162\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\
 "\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1246822881/ref\\u003dsr_1_10?ie\\u003dUTF8\\u0026qid\\u003d18489479\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1246822881\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1497903920/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1497903920\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors
 \":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1105315800/ref\\u003ds9_simh_gw_p74_d2_i2?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dISLI9GZS5J520O513PK6\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d191025471\\u0026pf_rd_i\\u003d563906\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1105315800\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1249420024/ref\\u003ds9_simh_gw_p63_d3_i6?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dBG6V7MK62E7322D5VS4K\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d335458808\\u0026pf_rd_i\\u003d388332\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\
 \"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1249420024\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1287198848/ref\\u003ds9_simh_gw_p107_d0_i10?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dUATP2780PEBSP3MZ85J9\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d724971579\\u0026pf_rd_i\\u003d737723\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1287198848\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://
 www.amazon.com/dp/product/1599152088/ref\\u003ds9_simh_gw_p74_d2_i1?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d1R9QZBS3LHGN2QKTW84D\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d285986720\\u0026pf_rd_i\\u003d167207\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1599152088\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1287183700/ref\\u003ds9_simh_gw_p79_d17_i7?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d4W49A4JP56S6IELDOF3S\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d604578060\\u0026pf_rd_i\\u003d666380\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\
 \":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1287183700\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1470817403/ref\\u003ds9_simh_gw_p63_d3_i10?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d72AXH8EE5ZQTRY727NNV\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d213474611\\u0026pf_rd_i\\u003d351283\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1470817403\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Sonnets-Facing-Conviction-Christo
 pher-Presfield/dp/1891812203\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1891812203\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Dead-Ringer-Jasmine-Cresswell/dp/B0010BSZ5U/ref\\u003dsr_1_1/190-6876153-5019461?ie\\u003dUTF8\\u0026keywords\\u003d1551667126\\u0026qid\\u003d1434458583\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B0010BSZ5U\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}
 ","{\"url\":\"https://www.amazon.com/dp/1933660996/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1933660996\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Sherington-Fiefs-Fields-Buckinghamshire-Village/dp/0521046378/ref\\u003dsr_1_1/182-5625910-7791542?ie\\u003dUTF8\\u0026keywords\\u003d9780521046374\\u0026qid\\u003d1434591978\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0521046378\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlReque
 stCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/0452297060/ref\\u003ds9_ri_gw_g201_i6?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d9WQLGU3K20TZ97OLNE9N\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d817830578\\u0026pf_rd_i\\u003d592054\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0452297060\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Believers-Pocket-Companion-Needful-Sinners/dp/1889058092/ref\\u003dsr_1_1/181-8057383-5400821?ie\\u003dUTF8\\u0026keywords\\u003d1889058092\\u0026qid\\u003d1434591575\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"
 retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1889058092\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1288686684/ref\\u003ds9_simh_gw_p107_d0_i5?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d6G636UL89WPEJRNWM864\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d863114753\\u0026pf_rd_i\\u003d127597\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1288686684\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www
 .amazon.com/dp/product/1441968091/ref\\u003ds9_simh_gw_p107_d0_i7?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003dVD3N8XQHIN218881396G\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d591946161\\u0026pf_rd_i\\u003d784879\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1441968091\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Illustrated-Skating-Dictionary-Young-People/dp/013451260X/ref\\u003dsr_1_1/187-7738327-4220041?ie\\u003dUTF8\\u0026keywords\\u003d9780134512600\\u0026qid\\u003d1434671356\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\
 \\"retailerProductId\\\":\\\"013451260X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1286427355/ref\\u003ds9_simh_gw_p63_d3_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dG08BDQF87W390SN0VKSY\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d609639660\\u0026pf_rd_i\\u003d873910\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1286427355\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1592999107/ref\\u003ds9_simh_gw_p63_d3_i10?pf_rd_m\\u003dATVPDKIKX0DER\\u002
 6pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d0GZEX2Z031943Y98PS2O\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d207358316\\u0026pf_rd_i\\u003d646461\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1592999107\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/0971487421/ref\\u003ds9_simh_gw_p79_d17_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dVP2GYS4S48L27YR91NGX\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d288057837\\u0026pf_rd_i\\u003d811217\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0971487421\\\",\\\"referenc
 ePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Dakine-Traction-12-25-Inch-Black-Smoke/dp/B009HYI5U2/ref\\u003dsr_1_1/190-3075409-4147065?ie\\u003dUTF8\\u0026keywords\\u003d0610934775921\\u0026qid\\u003d1434572027\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"B009HYI5U2\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1505465214/ref\\u003ds9_simh_gw_p79_d17_i10?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dS88VYH0JSN0WV58N763S\\u0026pf_rd_t\\u003d101\\u002
 6pf_rd_p\\u003d469060850\\u0026pf_rd_i\\u003d923112\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1505465214\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1249477794/ref\\u003ds9_ri_gw_g201_i3?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003d1YEA01A3X8LEV2A1XUYL\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d163201523\\u0026pf_rd_i\\u003d432311\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1249477794\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlR
 equestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1439101515/ref\\u003dsr_1_10?ie\\u003dUTF8\\u0026qid\\u003d41353122\\u0026sr\\u003d8-1\\u0026keywords\\u003dnull\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1439101515\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/0871522748/ref\\u003ds9_simh_gw_p63_d3_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dM2XLTA5E84OZ9WW38KL1\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d173836115\\u0026pf_rd_i\\u003d585725\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US
 \\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0871522748\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Guide-Florida-legal-research-Stupski/dp/0327155868/ref\\u003dsr_1_1/178-6731625-0968554?ie\\u003dUTF8\\u0026keywords\\u003d0327155868\\u0026qid\\u003d1434593715\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0327155868\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/0735341257/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PR
 ODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0735341257\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1287251641/ref\\u003ds9_simh_gw_p63_d3_i1?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-1\\u0026pf_rd_r\\u003d1KEUJ6L7GYD56MD665L5\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d508856645\\u0026pf_rd_i\\u003d515955\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1287251641\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https:
 //www.amazon.com/Points-2012-2013-Handbook-Brigade-Midshipmen/dp/161251149X/ref\\u003dsr_1_1/191-7390079-6376012?ie\\u003dUTF8\\u0026keywords\\u003d9781612511498\\u0026qid\\u003d1434621407\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"161251149X\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/9050634788/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"9050634788\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":
 1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1406829730/ref\\u003ds9_simh_gw_p63_d3_i8?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d86Y0VR3VTP14529B9TK2\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d516175225\\u0026pf_rd_i\\u003d152667\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1406829730\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1120716756/ref\\u003dcm_cr_pr_product_top\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1120716756\\\",\\\"refere
 ncePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1449781837/ref\\u003ds9_ri_gw_g201_i9?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dQI0MOKJ0782RNRSXO3EJ\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d853693070\\u0026pf_rd_i\\u003d466575\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1449781837\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Omnibus-Pusher-Kings-Ransom-Money/dp/1407221035/ref\\u003dsr_1_1/187-4173899-5264528?ie\\u003dUTF8\\u0026keywords\\u003d9781407221038\\u0
 026qid\\u003d1434673964\\u0026sr\\u003d8-1\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1407221035\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/gp/product/1500892181/ref\\u003ds9_simh_gw_p107_d0_i1?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003d3S5TF02AB6MRAFME7AKJ\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d572077303\\u0026pf_rd_i\\u003d484731\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1500892181\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlReque
 stCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/product/1847975313/ref\\u003ds9_ri_gw_g201_i9?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-3\\u0026pf_rd_r\\u003dAJO40FSP5P875338JO26\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d321291056\\u0026pf_rd_i\\u003d704134\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1847975313\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/dp/1249000106/ref\\u003ds9_simh_gw_p74_d2_i4?pf_rd_m\\u003dATVPDKIKX0DER\\u0026pf_rd_s\\u003dcenter-2\\u0026pf_rd_r\\u003dIY3AAVY7WEF77SSKDY0S\\u0026pf_rd_t\\u003d101\\u0026pf_rd_p\\u003d124726089\\u0026pf_rd_i\\u003d902185\",\"metadata\":\"{
 \\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1249000106\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Golden-Dawn-Journal-Book-Llewellyns/dp/1567188516\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us\\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"1567188516\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Wee-Pals-Knowledge-Morrie-Turner/dp/0451058003\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"retailerName\\\":\\\"amazon_us
 \\\",\\\"site\\\":\\\"US\\\",\\\"productCrawlMetadata\\\":{\\\"retailerProductId\\\":\\\"0451058003\\\",\\\"referencePrice\\\":0.0,\\\"referenceShippingPrice\\\":0.0,\\\"selectors\\\":[]},\\\"tracker\\\":{\\\"crawlRequestCreateTS\\\":1486899602000}}\",\"linkSelectors\":\"\",\"extrFeilds\":[]}","{\"url\":\"https://www.amazon.com/Sourcebook-Public-Record-Providers-Investigative/dp/1879792060\",\"metadata\":\"{\\\"crawlType\\\":\\\"PRODUCT\\\",\\\"reta

<TRUNCATED>


[06/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/PersistType.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/PersistType.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/PersistType.scala
new file mode 100644
index 0000000..10b83c8
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/PersistType.scala
@@ -0,0 +1,58 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl
+
+import scala.util.matching.Regex
+
+sealed trait PersistType {
+  val regex: Regex
+  val desc: String
+//  def temp: Boolean = false
+//  def persist: Boolean = false
+//  def collect: Boolean = false
+}
+
+object PersistType {
+  private val persistTypes: List[PersistType] = List(RecordPersistType, MetricPersistType, NonePersistType)
+  def apply(ptn: String): PersistType = {
+    persistTypes.find(tp => ptn match {
+      case tp.regex() => true
+      case _ => false
+    }).getOrElse(NonePersistType)
+  }
+  def unapply(pt: PersistType): Option[String] = Some(pt.desc)
+}
+
+final case object NonePersistType extends PersistType {
+  val regex: Regex = "".r
+  val desc: String = "none"
+}
+
+final case object RecordPersistType extends PersistType {
+  val regex: Regex = "^(?i)record$".r
+  val desc: String = "record"
+//  override def temp: Boolean = true
+}
+
+final case object MetricPersistType extends PersistType {
+  val regex: Regex = "^(?i)metric$".r
+  val desc: String = "metric"
+//  override def temp: Boolean = true
+//  override def collect: Boolean = true
+}
\ No newline at end of file
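
PersistType.apply resolves a configured persist-type string against the case-insensitive regexes above and falls back to NonePersistType when nothing matches. A quick sketch of that resolution (assuming the measure module is on the classpath):

    import org.apache.griffin.measure.rule.dsl._

    PersistType("RECORD").desc     // "record" -- matched by "^(?i)record$"
    PersistType("metric").desc     // "metric" -- matched by "^(?i)metric$"
    PersistType("whatever").desc   // "none"   -- no pattern matches, so NonePersistType is returned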

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/AccuracyAnalyzer.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/AccuracyAnalyzer.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/AccuracyAnalyzer.scala
new file mode 100644
index 0000000..7efb32e
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/AccuracyAnalyzer.scala
@@ -0,0 +1,41 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.analyzer
+
+import org.apache.griffin.measure.rule.dsl.expr._
+
+
+case class AccuracyAnalyzer(expr: LogicalExpr, sourceName: String, targetName: String) extends BasicAnalyzer {
+
+  val dataSourceNames = expr.preOrderTraverseDepthFirst(Set[String]())(seqDataSourceNames, combDataSourceNames)
+
+  val sourceSelectionExprs = {
+    val seq = seqSelectionExprs(sourceName)
+    expr.preOrderTraverseDepthFirst(Seq[SelectionExpr]())(seq, combSelectionExprs)
+  }
+  val targetSelectionExprs = {
+    val seq = seqSelectionExprs(targetName)
+    expr.preOrderTraverseDepthFirst(Seq[SelectionExpr]())(seq, combSelectionExprs)
+  }
+
+  val selectionExprs = sourceSelectionExprs ++ {
+    expr.preOrderTraverseDepthFirst(Seq[AliasableExpr]())(seqWithAliasExprs, combWithAliasExprs)
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/BasicAnalyzer.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/BasicAnalyzer.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/BasicAnalyzer.scala
new file mode 100644
index 0000000..063eb7b
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/BasicAnalyzer.scala
@@ -0,0 +1,53 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.analyzer
+
+import org.apache.griffin.measure.rule.dsl.expr._
+
+
+trait BasicAnalyzer extends Serializable {
+
+  val expr: Expr
+
+  val seqDataSourceNames = (expr: Expr, v: Set[String]) => {
+    expr match {
+      case DataSourceHeadExpr(name) => v + name
+      case _ => v
+    }
+  }
+  val combDataSourceNames = (a: Set[String], b: Set[String]) => a ++ b
+
+  val seqSelectionExprs = (dsName: String) => (expr: Expr, v: Seq[SelectionExpr]) => {
+    expr match {
+      case se @ SelectionExpr(head: DataSourceHeadExpr, _, _) if (head.desc == dsName) => v :+ se
+      case _ => v
+    }
+  }
+  val combSelectionExprs = (a: Seq[SelectionExpr], b: Seq[SelectionExpr]) => a ++ b
+
+  val seqWithAliasExprs = (expr: Expr, v: Seq[AliasableExpr]) => {
+    expr match {
+      case se: SelectExpr => v
+      case a: AliasableExpr if (a.alias.nonEmpty) => v :+ a
+      case _ => v
+    }
+  }
+  val combWithAliasExprs = (a: Seq[AliasableExpr], b: Seq[AliasableExpr]) => a ++ b
+
+}
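
BasicAnalyzer packages each analysis as a (seq, comb) pair: seq folds one node into an accumulator, comb merges accumulators coming from different subtrees, and the actual walk is preOrderTraverseDepthFirst on TreeNode, which is not part of this diff. A hypothetical, self-contained illustration of that contract (Node, preOrderFold and the sample tree below are made up for the example, not part of the codebase):

    // toy tree and fold that mimic the assumed seq/comb contract
    case class Node(value: String, children: Seq[Node] = Nil)

    def preOrderFold[T](node: Node, init: T)(seq: (Node, T) => T, comb: (T, T) => T): T = {
      val here = seq(node, init)                          // visit the node itself first (pre-order)
      node.children.foldLeft(here) { (acc, child) =>
        comb(acc, preOrderFold(child, init)(seq, comb))   // merge each child's result into the accumulator
      }
    }

    val tree = Node("select", Seq(Node("source"), Node("target"), Node("source")))
    val seqNames  = (n: Node, v: Set[String]) => if (n.children.isEmpty) v + n.value else v
    val combNames = (a: Set[String], b: Set[String]) => a ++ b
    preOrderFold(tree, Set[String]())(seqNames, combNames)  // Set(source, target)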

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/ProfilingAnalyzer.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/ProfilingAnalyzer.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/ProfilingAnalyzer.scala
new file mode 100644
index 0000000..34bdbd3
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/analyzer/ProfilingAnalyzer.scala
@@ -0,0 +1,52 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.analyzer
+
+import org.apache.griffin.measure.rule.dsl.expr._
+
+
+case class ProfilingAnalyzer(expr: ProfilingClause, sourceName: String) extends BasicAnalyzer {
+
+  val dataSourceNames = expr.preOrderTraverseDepthFirst(Set[String]())(seqDataSourceNames, combDataSourceNames)
+
+  val sourceSelectionExprs = {
+    val seq = seqSelectionExprs(sourceName)
+    expr.selectClause.preOrderTraverseDepthFirst(Seq[SelectionExpr]())(seq, combSelectionExprs)
+  }
+
+  val selectionExprs = expr.selectClause.exprs.map(_.extractSelf)
+  def containsAllSelectionExpr = {
+    selectionExprs.filter { expr =>
+      expr match {
+        case SelectionExpr(head: ALLSelectHeadExpr, selectors: Seq[SelectExpr], _) => {
+          selectors.isEmpty
+        }
+        case SelectionExpr(head: DataSourceHeadExpr, selectors: Seq[SelectExpr], _) => {
+          (head.desc == sourceName) && (selectors.size == 1) && (selectors.head.isInstanceOf[AllFieldsSelectExpr])
+        }
+        case _ => false
+      }
+    }.size > 0
+  }
+
+  val groupbyExprOpt = expr.groupbyClauseOpt
+  val preGroupbyExprs = expr.preGroupbyClauses.map(_.extractSelf)
+  val postGroupbyExprs = expr.postGroupbyClauses.map(_.extractSelf)
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/AliasableExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/AliasableExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/AliasableExpr.scala
new file mode 100644
index 0000000..33a12e0
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/AliasableExpr.scala
@@ -0,0 +1,25 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+trait AliasableExpr extends Expr {
+
+  def alias: Option[String]
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/ClauseExpression.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/ClauseExpression.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/ClauseExpression.scala
new file mode 100644
index 0000000..26882b4
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/ClauseExpression.scala
@@ -0,0 +1,150 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+trait ClauseExpression extends Expr {
+}
+
+case class SelectClause(exprs: Seq[Expr]) extends ClauseExpression {
+
+  addChildren(exprs)
+
+  def desc: String = s"${exprs.map(_.desc).mkString(", ")}"
+  def coalesceDesc: String = s"${exprs.map(_.desc).mkString(", ")}"
+
+}
+
+case class WhereClause(expr: Expr) extends ClauseExpression {
+
+  addChild(expr)
+
+  def desc: String = s"WHERE ${expr.desc}"
+  def coalesceDesc: String = s"WHERE ${expr.coalesceDesc}"
+
+}
+
+case class GroupbyClause(exprs: Seq[Expr], havingClauseOpt: Option[Expr]) extends ClauseExpression {
+
+  addChildren(exprs ++ havingClauseOpt.toSeq)
+
+  def desc: String = {
+    val gbs = exprs.map(_.desc).mkString(", ")
+    havingClauseOpt match {
+      case Some(having) => s"GROUP BY ${gbs} HAVING ${having.desc}"
+      case _ => s"GROUP BY ${gbs}"
+    }
+  }
+  def coalesceDesc: String = {
+    val gbs = exprs.map(_.desc).mkString(", ")
+    havingClauseOpt match {
+      case Some(having) => s"GROUP BY ${gbs} HAVING ${having.coalesceDesc}"
+      case _ => s"GROUP BY ${gbs}"
+    }
+  }
+
+  def merge(other: GroupbyClause): GroupbyClause = {
+    val newHavingClauseOpt = (havingClauseOpt, other.havingClauseOpt) match {
+      case (Some(hc), Some(ohc)) => {
+        val logical1 = LogicalFactorExpr(hc, false, None)
+        val logical2 = LogicalFactorExpr(ohc, false, None)
+        Some(BinaryLogicalExpr(logical1, ("AND", logical2) :: Nil))
+      }
+      case (a @ Some(_), _) => a
+      case (_, b @ Some(_)) => b
+      case (_, _) => None
+    }
+    GroupbyClause(exprs ++ other.exprs, newHavingClauseOpt)
+  }
+
+}
+
+case class OrderbyItem(expr: Expr, orderOpt: Option[String]) extends Expr {
+  addChild(expr)
+  def desc: String = {
+    orderOpt match {
+      case Some(os) => s"${expr.desc} ${os.toUpperCase}"
+      case _ => s"${expr.desc}"
+    }
+  }
+  def coalesceDesc: String = desc
+}
+
+case class OrderbyClause(items: Seq[OrderbyItem]) extends ClauseExpression {
+
+  addChildren(items.map(_.expr))
+
+  def desc: String = {
+    val obs = items.map(_.desc).mkString(", ")
+    s"ORDER BY ${obs}"
+  }
+  def coalesceDesc: String = {
+    val obs = items.map(_.desc).mkString(", ")
+    s"ORDER BY ${obs}"
+  }
+}
+
+case class LimitClause(expr: Expr) extends ClauseExpression {
+
+  addChild(expr)
+
+  def desc: String = s"LIMIT ${expr.desc}"
+  def coalesceDesc: String = s"LIMIT ${expr.coalesceDesc}"
+}
+
+case class CombinedClause(selectClause: SelectClause, tails: Seq[ClauseExpression]
+                         ) extends ClauseExpression {
+
+  addChildren(selectClause +: tails)
+
+  def desc: String = {
+    tails.foldLeft(selectClause.desc) { (head, tail) =>
+      s"${head} ${tail.desc}"
+    }
+  }
+  def coalesceDesc: String = {
+    tails.foldLeft(selectClause.coalesceDesc) { (head, tail) =>
+      s"${head} ${tail.coalesceDesc}"
+    }
+  }
+}
+
+case class ProfilingClause(selectClause: SelectClause, groupbyClauseOpt: Option[GroupbyClause],
+                           preGroupbyClauses: Seq[ClauseExpression],
+                           postGroupbyClauses: Seq[ClauseExpression]
+                          ) extends ClauseExpression {
+  addChildren(groupbyClauseOpt match {
+    case Some(gc) => (selectClause +: preGroupbyClauses) ++ (gc +: postGroupbyClauses)
+    case _ => (selectClause +: preGroupbyClauses) ++ postGroupbyClauses
+  })
+
+  def desc: String = {
+    val selectDesc = selectClause.desc
+    val groupbyDesc = groupbyClauseOpt.map(_.desc).mkString(" ")
+    val preDesc = preGroupbyClauses.map(_.desc).mkString(" ")
+    val postDesc = postGroupbyClauses.map(_.desc).mkString(" ")
+    s"${selectDesc} ${preDesc} ${groupbyDesc} ${postDesc}"
+  }
+  def coalesceDesc: String = {
+    val selectDesc = selectClause.coalesceDesc
+    val groupbyDesc = groupbyClauseOpt.map(_.coalesceDesc).mkString(" ")
+    val preDesc = preGroupbyClauses.map(_.coalesceDesc).mkString(" ")
+    val postDesc = postGroupbyClauses.map(_.coalesceDesc).mkString(" ")
+    s"${selectDesc} ${preDesc} ${groupbyDesc} ${postDesc}"
+  }
+}
\ No newline at end of file
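
GroupbyClause.merge concatenates the group-by expressions of both clauses and keeps a single HAVING clause, AND-ing the two when both sides carry one. A small sketch using the literal expressions added elsewhere in this change (assuming the measure module is on the classpath):

    import org.apache.griffin.measure.rule.dsl.expr._

    val left  = GroupbyClause(LiteralStringExpr("`name`") :: Nil, None)
    val right = GroupbyClause(LiteralStringExpr("`age`") :: Nil,
                              Some(LiteralStringExpr("count(*) > 1")))
    (left merge right).desc   // GROUP BY `name`, `age` HAVING count(*) > 1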

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/Expr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/Expr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/Expr.scala
new file mode 100644
index 0000000..850579c
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/Expr.scala
@@ -0,0 +1,29 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+trait Expr extends TreeNode with Serializable {
+
+  def desc: String
+
+  def coalesceDesc: String
+
+  def extractSelf: Expr = this
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/FunctionExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/FunctionExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/FunctionExpr.scala
new file mode 100644
index 0000000..b82fd96
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/FunctionExpr.scala
@@ -0,0 +1,29 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+case class FunctionExpr(functionName: String, args: Seq[Expr], aliasOpt: Option[String]
+                       ) extends Expr with AliasableExpr {
+
+  addChildren(args)
+
+  def desc: String = s"${functionName}(${args.map(_.desc).mkString(", ")})"
+  def coalesceDesc: String = desc
+  def alias: Option[String] = if (aliasOpt.isEmpty) Some(functionName) else aliasOpt
+}
\ No newline at end of file
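
FunctionExpr renders as a SQL-style call, and when no explicit alias is given its alias falls back to the function name. For example (assuming the measure module is on the classpath; LiteralStringExpr comes from the LiteralExpr file below):

    import org.apache.griffin.measure.rule.dsl.expr._

    val cnt = FunctionExpr("count", LiteralStringExpr("*") :: Nil, None)
    cnt.desc    // count(*)
    cnt.alias   // Some(count) -- falls back to the function name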

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/LiteralExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/LiteralExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/LiteralExpr.scala
new file mode 100644
index 0000000..60290bc
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/LiteralExpr.scala
@@ -0,0 +1,72 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+import org.apache.griffin.measure.utils.TimeUtil
+
+trait LiteralExpr extends Expr {
+  def coalesceDesc: String = desc
+}
+
+case class LiteralNullExpr(str: String) extends LiteralExpr {
+  def desc: String = "NULL"
+}
+
+case class LiteralNanExpr(str: String) extends LiteralExpr {
+  def desc: String = "NaN"
+}
+
+case class LiteralStringExpr(str: String) extends LiteralExpr {
+  def desc: String = str
+}
+
+case class LiteralNumberExpr(str: String) extends LiteralExpr {
+  def desc: String = {
+    try {
+      if (str.contains(".")) {
+        str.toDouble.toString
+      } else {
+        str.toLong.toString
+      }
+    } catch {
+      case e: Throwable => throw new Exception(s"${str} is invalid number")
+    }
+  }
+}
+
+case class LiteralTimeExpr(str: String) extends LiteralExpr {
+  def desc: String = {
+    TimeUtil.milliseconds(str) match {
+      case Some(t) => t.toString
+      case _ => throw new Exception(s"${str} is invalid time")
+    }
+  }
+}
+
+case class LiteralBooleanExpr(str: String) extends LiteralExpr {
+  final val TrueRegex = """(?i)true""".r
+  final val FalseRegex = """(?i)false""".r
+  def desc: String = {
+    str match {
+      case TrueRegex() => true.toString
+      case FalseRegex() => false.toString
+      case _ => throw new Exception(s"${str} is invalid boolean")
+    }
+  }
+}
\ No newline at end of file
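
The literal expressions normalize their raw string in desc: numbers are re-rendered through Long/Double parsing, booleans are matched case-insensitively, and invalid input fails fast with an exception. A few expected outputs (assuming the measure module is on the classpath; LiteralTimeExpr additionally depends on TimeUtil.milliseconds, which is not part of this diff):

    import org.apache.griffin.measure.rule.dsl.expr._

    LiteralNumberExpr("42").desc      // "42"   -- no '.', parsed as Long
    LiteralNumberExpr("3.14").desc    // "3.14" -- contains '.', parsed as Double
    LiteralBooleanExpr("TRUE").desc   // "true" -- case-insensitive match
    LiteralNullExpr("null").desc      // "NULL"
    LiteralNumberExpr("abc").desc     // throws Exception("abc is invalid number")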

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/LogicalExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/LogicalExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/LogicalExpr.scala
new file mode 100644
index 0000000..4b16219
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/LogicalExpr.scala
@@ -0,0 +1,170 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+trait LogicalExpr extends Expr {
+}
+
+case class InExpr(head: Expr, is: Boolean, range: Seq[Expr]) extends LogicalExpr {
+
+  addChildren(head +: range)
+
+  def desc: String = {
+    val notStr = if (is) "" else " NOT"
+    s"${head.desc}${notStr} IN (${range.map(_.desc).mkString(", ")})"
+  }
+  def coalesceDesc: String = {
+    val notStr = if (is) "" else " NOT"
+    s"${head.coalesceDesc}${notStr} IN (${range.map(_.coalesceDesc).mkString(", ")})"
+  }
+}
+
+case class BetweenExpr(head: Expr, is: Boolean, range: Seq[Expr]) extends LogicalExpr {
+
+  range match {
+    case first :: second :: _ => addChildren(head :: first :: second :: Nil)
+    case _ => throw new Exception("between expression exception: range less than 2")
+  }
+
+  def desc: String = {
+    val notStr = if (is) "" else " NOT"
+    val rangeStr = range match {
+      case first :: second :: _ => s"${first.desc} AND ${second.desc}"
+      case _ => throw new Exception("between expression exception: range less than 2")
+    }
+    s"${head.desc}${notStr} BETWEEN ${rangeStr}"
+  }
+  def coalesceDesc: String = {
+    val notStr = if (is) "" else " NOT"
+    val rangeStr = range match {
+      case first :: second :: _ => s"${first.coalesceDesc} AND ${second.coalesceDesc}"
+      case _ => throw new Exception("between expression exception: range less than 2")
+    }
+    s"${head.coalesceDesc}${notStr} BETWEEN ${rangeStr}"
+  }
+}
+
+case class LikeExpr(head: Expr, is: Boolean, value: Expr) extends LogicalExpr {
+
+  addChildren(head :: value :: Nil)
+
+  def desc: String = {
+    val notStr = if (is) "" else " NOT"
+    s"${head.desc}${notStr} LIKE ${value.desc}"
+  }
+  def coalesceDesc: String = {
+    val notStr = if (is) "" else " NOT"
+    s"${head.coalesceDesc}${notStr} LIKE ${value.coalesceDesc}"
+  }
+}
+
+case class IsNullExpr(head: Expr, is: Boolean) extends LogicalExpr {
+
+  addChild(head)
+
+  def desc: String = {
+    val notStr = if (is) "" else " NOT"
+    s"${head.desc} IS${notStr} NULL"
+  }
+  def coalesceDesc: String = desc
+}
+
+case class IsNanExpr(head: Expr, is: Boolean) extends LogicalExpr {
+
+  addChild(head)
+
+  def desc: String = {
+    val notStr = if (is) "" else "NOT "
+    s"${notStr}isnan(${head.desc})"
+  }
+  def coalesceDesc: String = desc
+}
+
+// -----------
+
+case class LogicalFactorExpr(factor: Expr, withBracket: Boolean, aliasOpt: Option[String]
+                            ) extends LogicalExpr with AliasableExpr {
+
+  addChild(factor)
+
+  def desc: String = if (withBracket) s"(${factor.desc})" else factor.desc
+  def coalesceDesc: String = factor.coalesceDesc
+  def alias: Option[String] = aliasOpt
+  override def extractSelf: Expr = {
+    if (aliasOpt.nonEmpty) this
+    else factor.extractSelf
+  }
+}
+
+case class UnaryLogicalExpr(oprs: Seq[String], factor: LogicalExpr) extends LogicalExpr {
+
+  addChild(factor)
+
+  def desc: String = {
+    oprs.foldRight(factor.desc) { (opr, fac) =>
+      s"(${trans(opr)} ${fac})"
+    }
+  }
+  def coalesceDesc: String = {
+    oprs.foldRight(factor.coalesceDesc) { (opr, fac) =>
+      s"(${trans(opr)} ${fac})"
+    }
+  }
+  private def trans(s: String): String = {
+    s match {
+      case "!" => "NOT"
+      case _ => s.toUpperCase
+    }
+  }
+  override def extractSelf: Expr = {
+    if (oprs.nonEmpty) this
+    else factor.extractSelf
+  }
+}
+
+case class BinaryLogicalExpr(factor: LogicalExpr, tails: Seq[(String, LogicalExpr)]) extends LogicalExpr {
+
+  addChildren(factor +: tails.map(_._2))
+
+  def desc: String = {
+    val res = tails.foldLeft(factor.desc) { (fac, tail) =>
+      val (opr, expr) = tail
+      s"${fac} ${trans(opr)} ${expr.desc}"
+    }
+    res
+  }
+  def coalesceDesc: String = {
+    val res = tails.foldLeft(factor.coalesceDesc) { (fac, tail) =>
+      val (opr, expr) = tail
+      s"${fac} ${trans(opr)} ${expr.coalesceDesc}"
+    }
+    res
+  }
+  private def trans(s: String): String = {
+    s match {
+      case "&&" => "AND"
+      case "||" => "OR"
+      case _ => s.trim.toUpperCase
+    }
+  }
+  override def extractSelf: Expr = {
+    if (tails.nonEmpty) this
+    else factor.extractSelf
+  }
+}
\ No newline at end of file

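A minimal sketch, not part of the patch, of how the logical nodes above compose into a SQL-style description (LiteralNumberExpr and LiteralBooleanExpr come from LiteralExpr.scala above):

val lhs = LogicalFactorExpr(LiteralNumberExpr("1"), withBracket = false, None)
val rhs = LogicalFactorExpr(LiteralBooleanExpr("true"), withBracket = false, None)
BinaryLogicalExpr(lhs, Seq(("&&", rhs))).desc   // "1 AND true"
UnaryLogicalExpr(Seq("!"), lhs).desc            // "(NOT 1)"
IsNullExpr(lhs, is = false).desc                // "1 IS NOT NULL"
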
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/MathExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/MathExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/MathExpr.scala
new file mode 100644
index 0000000..b3d3db4
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/MathExpr.scala
@@ -0,0 +1,80 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+trait MathExpr extends Expr {
+}
+
+case class MathFactorExpr(factor: Expr, withBracket: Boolean, aliasOpt: Option[String]
+                         ) extends MathExpr with AliasableExpr {
+
+  addChild(factor)
+
+  def desc: String = if (withBracket) s"(${factor.desc})" else factor.desc
+  def coalesceDesc: String = factor.coalesceDesc
+  def alias: Option[String] = aliasOpt
+  override def extractSelf: Expr = {
+    if (aliasOpt.nonEmpty) this
+    else factor.extractSelf
+  }
+}
+
+case class UnaryMathExpr(oprs: Seq[String], factor: MathExpr) extends MathExpr {
+
+  addChild(factor)
+
+  def desc: String = {
+    oprs.foldRight(factor.desc) { (opr, fac) =>
+      s"(${opr}${fac})"
+    }
+  }
+  def coalesceDesc: String = {
+    oprs.foldRight(factor.coalesceDesc) { (opr, fac) =>
+      s"(${opr}${fac})"
+    }
+  }
+  override def extractSelf: Expr = {
+    if (oprs.nonEmpty) this
+    else factor.extractSelf
+  }
+}
+
+case class BinaryMathExpr(factor: MathExpr, tails: Seq[(String, MathExpr)]) extends MathExpr {
+
+  addChildren(factor +: tails.map(_._2))
+
+  def desc: String = {
+    val res = tails.foldLeft(factor.desc) { (fac, tail) =>
+      val (opr, expr) = tail
+      s"${fac} ${opr} ${expr.desc}"
+    }
+    res
+  }
+  def coalesceDesc: String = {
+    val res = tails.foldLeft(factor.coalesceDesc) { (fac, tail) =>
+      val (opr, expr) = tail
+      s"${fac} ${opr} ${expr.coalesceDesc}"
+    }
+    res
+  }
+  override def extractSelf: Expr = {
+    if (tails.nonEmpty) this
+    else factor.extractSelf
+  }
+}
\ No newline at end of file

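A minimal sketch, not part of the patch, of the infix rendering produced by the math nodes above (literals from LiteralExpr.scala):

val two  = MathFactorExpr(LiteralNumberExpr("2"), withBracket = false, None)
val five = MathFactorExpr(LiteralNumberExpr("5"), withBracket = false, None)
BinaryMathExpr(two, Seq(("*", five))).desc   // "2 * 5"
UnaryMathExpr(Seq("-"), two).desc            // "(-2)"
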
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/SelectExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/SelectExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/SelectExpr.scala
new file mode 100644
index 0000000..fd803a8
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/SelectExpr.scala
@@ -0,0 +1,115 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+trait HeadExpr extends Expr {
+
+}
+
+case class DataSourceHeadExpr(name: String) extends HeadExpr {
+  def desc: String = name
+  def coalesceDesc: String = desc
+}
+
+case class FieldNameHeadExpr(field: String) extends HeadExpr {
+  def desc: String = field
+  def coalesceDesc: String = desc
+}
+
+case class ALLSelectHeadExpr() extends HeadExpr {
+  def desc: String = "*"
+  def coalesceDesc: String = desc
+}
+
+case class OtherHeadExpr(expr: Expr) extends HeadExpr {
+
+  addChild(expr)
+
+  def desc: String = expr.desc
+  def coalesceDesc: String = expr.coalesceDesc
+}
+
+// -------------
+
+trait SelectExpr extends Expr with AliasableExpr {
+}
+
+case class AllFieldsSelectExpr() extends SelectExpr {
+  def desc: String = s".*"
+  def coalesceDesc: String = desc
+  def alias: Option[String] = None
+}
+
+case class FieldSelectExpr(field: String) extends SelectExpr {
+  def desc: String = s".${field}"
+  def coalesceDesc: String = desc
+  def alias: Option[String] = Some(field)
+}
+
+case class IndexSelectExpr(index: Expr) extends SelectExpr {
+
+  addChild(index)
+
+  def desc: String = s"[${index.desc}]"
+  def coalesceDesc: String = desc
+  def alias: Option[String] = Some(desc)
+}
+
+case class FunctionSelectExpr(functionName: String, args: Seq[Expr]) extends SelectExpr {
+
+  addChildren(args)
+
+  def desc: String = ""
+  def coalesceDesc: String = desc
+  def alias: Option[String] = Some(functionName)
+}
+
+// -------------
+
+case class SelectionExpr(head: HeadExpr, selectors: Seq[SelectExpr], aliasOpt: Option[String]) extends SelectExpr {
+
+  addChildren(head +: selectors)
+
+  def desc: String = {
+    selectors.foldLeft(head.desc) { (hd, sel) =>
+      sel match {
+        case FunctionSelectExpr(funcName, args) => {
+          val nargs = hd +: args.map(_.desc)
+          s"${funcName}(${nargs.mkString(", ")})"
+        }
+        case _ => s"${hd}${sel.desc}"
+      }
+    }
+  }
+  def coalesceDesc: String = {
+    selectors.lastOption match {
+      case None => desc
+      case Some(sel: FunctionSelectExpr) => desc
+      case _ => s"coalesce(${desc}, 'null')"
+    }
+  }
+  def alias: Option[String] = {
+    if (aliasOpt.isEmpty) {
+      selectors.lastOption match {
+        case Some(last) => last.alias
+        case _ => None
+      }
+    } else aliasOpt
+  }
+}
\ No newline at end of file

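A minimal sketch, not part of the patch, of a selection equivalent to "source.age" and its null-safe form:

val sel = SelectionExpr(DataSourceHeadExpr("source"), Seq(FieldSelectExpr("age")), None)
sel.desc           // "source.age"
sel.coalesceDesc   // "coalesce(source.age, 'null')"
sel.alias          // Some("age"), taken from the last selector
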
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/TreeNode.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/TreeNode.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/TreeNode.scala
new file mode 100644
index 0000000..aab16b4
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/expr/TreeNode.scala
@@ -0,0 +1,45 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.expr
+
+trait TreeNode extends Serializable {
+
+  var children = Seq[TreeNode]()
+
+  def addChild(expr: TreeNode) = { children :+= expr }
+  def addChildren(exprs: Seq[TreeNode]) = { children ++= exprs }
+
+  def preOrderTraverseDepthFirst[T, A <: TreeNode](z: T)(seqOp: (A, T) => T, combOp: (T, T) => T): T = {
+    if (this.isInstanceOf[A]) {
+      val tv = seqOp(this.asInstanceOf[A], z)
+      children.foldLeft(combOp(z, tv)) { (ov, tn) =>
+        combOp(ov, tn.preOrderTraverseDepthFirst(z)(seqOp, combOp))
+      }
+    } else z
+  }
+  def postOrderTraverseDepthFirst[T, A <: TreeNode](z: T)(seqOp: (A, T) => T, combOp: (T, T) => T): T = {
+    if (this.isInstanceOf[A]) {
+      val cv = children.foldLeft(z) { (ov, tn) =>
+        combOp(ov, tn.postOrderTraverseDepthFirst(z)(seqOp, combOp))
+      }
+      combOp(z, seqOp(this.asInstanceOf[A], cv))
+    } else z
+  }
+
+}

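A minimal sketch, not part of the patch, counting the nodes of a small expression tree with the traversal above (expression classes come from MathExpr.scala and LiteralExpr.scala in this commit):

val expr = BinaryMathExpr(
  MathFactorExpr(LiteralNumberExpr("2"), withBracket = false, None),
  Seq(("+", MathFactorExpr(LiteralNumberExpr("5"), withBracket = false, None))))
val nodeCount =
  expr.preOrderTraverseDepthFirst[Int, TreeNode](0)((_, n) => n + 1, _ + _)
// nodeCount == 5: the binary node, two factors, two literals
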
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/parser/BasicParser.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/parser/BasicParser.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/parser/BasicParser.scala
new file mode 100644
index 0000000..0431354
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/parser/BasicParser.scala
@@ -0,0 +1,337 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.parser
+
+import org.apache.griffin.measure.rule.dsl.expr._
+
+import scala.util.parsing.combinator.JavaTokenParsers
+
+trait BasicParser extends JavaTokenParsers with Serializable {
+
+  val dataSourceNames: Seq[String]
+  val functionNames: Seq[String]
+
+  /**
+    * BNF for basic parser
+    *
+    * -- literal --
+    * <literal> ::= <literal-string> | <literal-number> | <literal-time> | <literal-boolean> | <literal-null> | <literal-nan>
+    * <literal-string> ::= <any-string>
+    * <literal-number> ::= <integer> | <double>
+    * <literal-time> ::= <integer> ("d"|"h"|"m"|"s"|"ms")
+    * <literal-boolean> ::= true | false
+    * <literal-null> ::= null
+    * <literal-nan> ::= nan
+    *
+    * -- selection --
+    * <selection> ::= <selection-head> [ <field-sel> | <index-sel> | <function-sel> ]*
+    * <selection-head> ::= ("data source name registered") | <function> | <field-name> | "*"
+    * <field-sel> ::= "." <field-name> | "[" <quote-field-name> "]"
+    * <index-sel> ::= "[" <arg> "]"
+    * <function-sel> ::= "." <function-name> "(" [<arg>]? [, <arg>]* ")"
+    * <arg> ::= <math-expr>
+    *
+    * -- math expr --
+    * <math-factor> ::= <literal> | <alias-expr> | <function> | <selection> | "(" <math-expr> ")"
+    * <unary-math-expr> ::= [<unary-opr>]* <math-factor>
+    * <binary-math-expr> ::= <unary-math-expr> [<binary-opr> <unary-math-expr>]*
+    * <math-expr> ::= <binary-math-expr>
+    *
+    * -- logical expr --
+    * <in-expr> ::= <math-expr> [<not>]? <in> <range-expr>
+    * <between-expr> ::= <math-expr> [<not>]? <between> (<math-expr> <and> <math-expr> | <range-expr>)
+    * <range-expr> ::= "(" [<math-expr>]? [, <math-expr>]* ")"
+    * <like-expr> ::= <math-expr> [<not>]? <like> <math-expr>
+    * <is-null-expr> ::= <math-expr> <is> [<not>]? <null>
+    * <is-nan-expr> ::= <math-expr> <is> [<not>]? <nan>
+    *
+    * <logical-factor> ::= <math-expr> | <in-expr> | <between-expr> | <like-expr> | <is-null-expr> | <is-nan-expr> | "(" <logical-expr> ")"
+    * <unary-logical-expr> ::= [<unary-logical-opr>]* <logical-factor>
+    * <binary-logical-expr> ::= <unary-logical-expr> [<binary-logical-opr> <unary-logical-expr>]*
+    * <logical-expr> ::= <binary-logical-expr>
+    *
+    * -- expression --
+    * <expr> = <math-expr> | <logical-expr>
+    *
+    * -- function expr --
+    * <function> ::= <function-name> "(" [<arg>]? [, <arg>]* ")"
+    * <function-name> ::= ("function name registered")
+    * <arg> ::= <expr>
+    *
+    * -- alias expr --
+    * <alias-expr> = <expr> <as> <name>
+    */
+
+  protected def genNamesParser(names: Seq[String]): Parser[String] = {
+    names.reverse.map {
+      fn => s"""(?i)${fn}""".r: Parser[String]
+    }.reduce(_ | _)
+  }
+
+  object Literal {
+    val NULL: Parser[String] = """(?i)null""".r
+    val NAN: Parser[String] = """(?i)nan""".r
+  }
+  import Literal._
+
+  object Operator {
+    val MATH_UNARY: Parser[String] = "+" | "-"
+    val MATH_BINARIES: Seq[Parser[String]] = Seq(("*" | "/" | "%"), ("+" | "-"))
+
+    val NOT: Parser[String] = """(?i)not\s""".r | "!"
+    val AND: Parser[String] = """(?i)and\s""".r | "&&"
+    val OR: Parser[String] = """(?i)or\s""".r | "||"
+    val IN: Parser[String] = """(?i)in\s""".r
+    val BETWEEN: Parser[String] = """(?i)between\s""".r
+    val AND_ONLY: Parser[String] = """(?i)and\s""".r
+    val IS: Parser[String] = """(?i)is\s""".r
+    val LIKE: Parser[String] = """(?i)like\s""".r
+    val COMPARE: Parser[String] = "=" | "!=" | "<>" | "<=" | ">=" | "<" | ">"
+    val LOGICAL_UNARY: Parser[String] = NOT
+    val LOGICAL_BINARIES: Seq[Parser[String]] = Seq((COMPARE), (AND), (OR))
+
+    val LSQBR: Parser[String] = "["
+    val RSQBR: Parser[String] = "]"
+    val LBR: Parser[String] = "("
+    val RBR: Parser[String] = ")"
+
+    val DOT: Parser[String] = "."
+    val ALLSL: Parser[String] = "*"
+    val SQUOTE: Parser[String] = "'"
+    val DQUOTE: Parser[String] = "\""
+    val UQUOTE: Parser[String] = "`"
+    val COMMA: Parser[String] = ","
+
+    val AS: Parser[String] = """(?i)as\s""".r
+    val WHERE: Parser[String] = """(?i)where\s""".r
+    val GROUP: Parser[String] = """(?i)group\s""".r
+    val ORDER: Parser[String] = """(?i)order\s""".r
+    val BY: Parser[String] = """(?i)by\s""".r
+    val DESC: Parser[String] = """(?i)desc""".r
+    val ASC: Parser[String] = """(?i)asc""".r
+    val HAVING: Parser[String] = """(?i)having\s""".r
+    val LIMIT: Parser[String] = """(?i)limit\s""".r
+  }
+  import Operator._
+
+  object Strings {
+    def AnyString: Parser[String] = """"(?:[^\"]|\")*"""".r | """'(?:[^']|\')*'""".r
+    def UQuoteTableFieldName: Parser[String] = """`(?:[^`]|[\\][`])*`""".r
+    def TableFieldName: Parser[String] = UQuoteTableFieldName | """[a-zA-Z_]\w*""".r
+    def DataSourceName: Parser[String] = genNamesParser(dataSourceNames)
+    def FunctionName: Parser[String] = genNamesParser(functionNames)
+
+    def IntegerNumber: Parser[String] = """[+\-]?\d+""".r
+    def DoubleNumber: Parser[String] = """[+\-]?(\.\d+|\d+\.\d*)""".r
+    def IndexNumber: Parser[String] = IntegerNumber
+
+    def TimeString: Parser[String] = """([+\-]?\d+)(d|h|m|s|ms)""".r
+    def BooleanString: Parser[String] = """(?i)true|false""".r
+  }
+  import Strings._
+
+  /**
+    * -- literal --
+    * <literal> ::= <literal-string> | <literal-number> | <literal-time> | <literal-boolean> | <literal-null> | <literal-nan>
+    * <literal-string> ::= <any-string>
+    * <literal-number> ::= <integer> | <double>
+    * <literal-time> ::= <integer> ("d"|"h"|"m"|"s"|"ms")
+    * <literal-boolean> ::= true | false
+    * <literal-null> ::= null
+    * <literal-nan> ::= nan
+    */
+  def literal: Parser[LiteralExpr] = literalNull | literalNan | literalBoolean | literalString | literalTime | literalNumber
+  def literalNull: Parser[LiteralNullExpr] = NULL ^^ { LiteralNullExpr(_) }
+  def literalNan: Parser[LiteralNanExpr] = NAN ^^ { LiteralNanExpr(_) }
+  def literalString: Parser[LiteralStringExpr] = AnyString ^^ { LiteralStringExpr(_) }
+  def literalNumber: Parser[LiteralNumberExpr] = (DoubleNumber | IntegerNumber) ^^ { LiteralNumberExpr(_) }
+  def literalTime: Parser[LiteralTimeExpr] = TimeString ^^ { LiteralTimeExpr(_) }
+  def literalBoolean: Parser[LiteralBooleanExpr] = BooleanString ^^ { LiteralBooleanExpr(_) }
+
+  /**
+    * -- selection --
+    * <selection> ::= <selection-head> [ <field-sel> | <index-sel> | <function-sel> ]*
+    * <selection-head> ::= ("data source name registered") | <function> | <field-name> | "*"
+    * <field-sel> ::= "." <field-name> | "[" <quote-field-name> "]"
+    * <index-sel> ::= "[" <arg> "]"
+    * <function-sel> ::= "." <function-name> "(" [<arg>]? [, <arg>]* ")"
+    * <arg> ::= <math-expr>
+    */
+
+  def selection: Parser[SelectionExpr] = selectionHead ~ rep(selector) ~ opt(asAlias) ^^ {
+    case head ~ sels ~ aliasOpt => SelectionExpr(head, sels, aliasOpt)
+  }
+  def selectionHead: Parser[HeadExpr] = DataSourceName ^^ {
+    DataSourceHeadExpr(_)
+  } | function ^^ {
+    OtherHeadExpr(_)
+  } | TableFieldName ^^ {
+    FieldNameHeadExpr(_)
+  } | ALLSL ^^ { _ =>
+    ALLSelectHeadExpr()
+  }
+  def selector: Parser[SelectExpr] = functionSelect | allFieldsSelect | fieldSelect | indexSelect
+  def allFieldsSelect: Parser[AllFieldsSelectExpr] = DOT ~> ALLSL ^^ { _ => AllFieldsSelectExpr() }
+  def fieldSelect: Parser[FieldSelectExpr] = DOT ~> TableFieldName ^^ { FieldSelectExpr(_) }
+  def indexSelect: Parser[IndexSelectExpr] = LSQBR ~> argument <~ RSQBR ^^ { IndexSelectExpr(_) }
+  def functionSelect: Parser[FunctionSelectExpr] = DOT ~ FunctionName ~ LBR ~ repsep(argument, COMMA) ~ RBR ^^ {
+    case _ ~ name ~ _ ~ args ~ _ => FunctionSelectExpr(name, args)
+  }
+
+  def asAlias: Parser[String] = AS ~> TableFieldName
+
+  /**
+    * -- math expr --
+    * <math-factor> ::= <literal> | <alias-expr> | <function> | <selection> | "(" <math-expr> ")"
+    * <unary-math-expr> ::= [<unary-opr>]* <math-factor>
+    * <binary-math-expr> ::= <unary-math-expr> [<binary-opr> <unary-math-expr>]*
+    * <math-expr> ::= <binary-math-expr>
+    */
+
+  def mathFactor: Parser[MathExpr] = (literal | function | selection) ^^ {
+    MathFactorExpr(_, false, None)
+  } | LBR ~ mathExpression ~ RBR ~ opt(asAlias) ^^ {
+    case _ ~ expr ~ _ ~ aliasOpt => MathFactorExpr(expr, true, aliasOpt)
+  }
+  def unaryMathExpression: Parser[MathExpr] = rep(MATH_UNARY) ~ mathFactor ^^ {
+    case Nil ~ a => a
+    case list ~ a => UnaryMathExpr(list, a)
+  }
+  def binaryMathExpressions: Seq[Parser[MathExpr]] =
+    MATH_BINARIES.foldLeft(List[Parser[MathExpr]](unaryMathExpression)) { (parsers, binaryParser) =>
+      val pre = parsers.head
+      val cur = pre ~ rep(binaryParser ~ pre) ^^ {
+        case a ~ Nil => a
+        case a ~ list => BinaryMathExpr(a, list.map(c => (c._1, c._2)))
+      }
+      cur :: parsers
+    }
+  def mathExpression: Parser[MathExpr] = binaryMathExpressions.head
+
+  /**
+    * -- logical expr --
+    * <in-expr> ::= <math-expr> [<not>]? <in> <range-expr>
+    * <between-expr> ::= <math-expr> [<not>]? <between> (<math-expr> <and> <math-expr> | <range-expr>)
+    * <range-expr> ::= "(" [<math-expr>]? [, <math-expr>]* ")"
+    * <like-expr> ::= <math-expr> [<not>]? <like> <math-expr>
+    * <is-null-expr> ::= <math-expr> <is> [<not>]? <null>
+    * <is-nan-expr> ::= <math-expr> <is> [<not>]? <nan>
+    *
+    * <logical-factor> ::= <math-expr> | <in-expr> | <between-expr> | <like-expr> | <is-null-expr> | <is-nan-expr> | "(" <logical-expr> ")"
+    * <unary-logical-expr> ::= [<unary-logical-opr>]* <logical-factor>
+    * <binary-logical-expr> ::= <unary-logical-expr> [<binary-logical-opr> <unary-logical-expr>]*
+    * <logical-expr> ::= <binary-logical-expr>
+    */
+
+  def inExpr: Parser[LogicalExpr] = mathExpression ~ opt(NOT) ~ IN ~ LBR ~ repsep(mathExpression, COMMA) ~ RBR ^^ {
+    case head ~ notOpt ~ _ ~ _ ~ list ~ _ => InExpr(head, notOpt.isEmpty, list)
+  }
+  def betweenExpr: Parser[LogicalExpr] = mathExpression ~ opt(NOT) ~ BETWEEN ~ LBR ~ repsep(mathExpression, COMMA) ~ RBR ^^ {
+    case head ~ notOpt ~ _ ~ _ ~ list ~ _ => BetweenExpr(head, notOpt.isEmpty, list)
+  } | mathExpression ~ opt(NOT) ~ BETWEEN ~ mathExpression ~ AND_ONLY ~ mathExpression ^^ {
+    case head ~ notOpt ~ _ ~ first ~ _ ~ second => BetweenExpr(head, notOpt.isEmpty, Seq(first, second))
+  }
+  def likeExpr: Parser[LogicalExpr] = mathExpression ~ opt(NOT) ~ LIKE ~ mathExpression ^^ {
+    case head ~ notOpt ~ _ ~ value => LikeExpr(head, notOpt.isEmpty, value)
+  }
+  def isNullExpr: Parser[LogicalExpr] = mathExpression ~ IS ~ opt(NOT) ~ NULL ^^ {
+    case head ~ _ ~ notOpt ~ _ => IsNullExpr(head, notOpt.isEmpty)
+  }
+  def isNanExpr: Parser[LogicalExpr] = mathExpression ~ IS ~ opt(NOT) ~ NAN ^^ {
+    case head ~ _ ~ notOpt ~ _ => IsNanExpr(head, notOpt.isEmpty)
+  }
+
+  def logicalFactor: Parser[LogicalExpr] = (inExpr | betweenExpr | likeExpr | isNullExpr | isNanExpr | mathExpression) ^^ {
+    LogicalFactorExpr(_, false, None)
+  } | LBR ~ logicalExpression ~ RBR ~ opt(asAlias) ^^ {
+    case _ ~ expr ~ _ ~ aliasOpt => LogicalFactorExpr(expr, true, aliasOpt)
+  }
+  def unaryLogicalExpression: Parser[LogicalExpr] = rep(LOGICAL_UNARY) ~ logicalFactor ^^ {
+    case Nil ~ a => a
+    case list ~ a => UnaryLogicalExpr(list, a)
+  }
+  def binaryLogicalExpressions: Seq[Parser[LogicalExpr]] =
+    LOGICAL_BINARIES.foldLeft(List[Parser[LogicalExpr]](unaryLogicalExpression)) { (parsers, binaryParser) =>
+      val pre = parsers.head
+      val cur = pre ~ rep(binaryParser ~ pre) ^^ {
+        case a ~ Nil => a
+        case a ~ list => BinaryLogicalExpr(a, list.map(c => (c._1, c._2)))
+      }
+      cur :: parsers
+    }
+  def logicalExpression: Parser[LogicalExpr] = binaryLogicalExpressions.head
+
+  /**
+    * -- expression --
+    * <expr> = <math-expr> | <logical-expr>
+    */
+
+  def expression: Parser[Expr] = logicalExpression | mathExpression
+
+  /**
+    * -- function expr --
+    * <function> ::= <function-name> "(" [<arg>]? [, <arg>]* ")"
+    * <function-name> ::= ("function name registered")
+    * <arg> ::= <expr>
+    */
+
+  def function: Parser[FunctionExpr] = FunctionName ~ LBR ~ repsep(argument, COMMA) ~ RBR ~ opt(asAlias) ^^ {
+    case name ~ _ ~ args ~ _ ~ aliasOpt => FunctionExpr(name, args, aliasOpt)
+  }
+  def argument: Parser[Expr] = expression
+
+  /**
+    * -- clauses --
+    * <select-clause> = <expr> [, <expr>]*
+    * <where-clause> = <where> <expr>
+    * <having-clause> = <having> <expr>
+    * <groupby-clause> = <group> <by> <expr> [ <having-clause> ]?
+    * <orderby-item> = <expr> [ <DESC> ]?
+    * <orderby-clause> = <order> <by> <orderby-item> [ , <orderby-item> ]*
+    * <limit-clause> = <limit> <expr>
+    */
+
+  def selectClause: Parser[SelectClause] = rep1sep(expression, COMMA) ^^ { SelectClause(_) }
+  def whereClause: Parser[WhereClause] = WHERE ~> expression ^^ { WhereClause(_) }
+  def havingClause: Parser[Expr] = HAVING ~> expression
+  def groupbyClause: Parser[GroupbyClause] = GROUP ~ BY ~ rep1sep(expression, COMMA) ~ opt(havingClause) ^^ {
+    case _ ~ _ ~ cols ~ havingOpt => GroupbyClause(cols, havingOpt)
+  }
+  def orderbyItem: Parser[OrderbyItem] = expression ~ opt(DESC | ASC) ^^ {
+    case expr ~ orderOpt => OrderbyItem(expr, orderOpt)
+  }
+  def orderbyClause: Parser[OrderbyClause] = ORDER ~ BY ~ rep1sep(orderbyItem, COMMA) ^^ {
+    case _ ~ _ ~ cols => OrderbyClause(cols)
+  }
+  def limitClause: Parser[LimitClause] = LIMIT ~> expression ^^ { LimitClause(_) }
+
+  /**
+    * -- combined clauses --
+    * <combined-clauses> = <select-clause> [ <where-clause> ]? [ <groupby-clause> ]? [ <orderby-clause> ]? [ <limit-clause> ]?
+    */
+
+  def combinedClause: Parser[CombinedClause] = selectClause ~ opt(whereClause) ~
+    opt(groupbyClause) ~ opt(orderbyClause) ~ opt(limitClause) ^^ {
+    case sel ~ whereOpt ~ groupbyOpt ~ orderbyOpt ~ limitOpt => {
+      val tails = Seq(whereOpt, groupbyOpt, orderbyOpt, limitOpt).flatMap(opt => opt)
+      CombinedClause(sel, tails)
+    }
+  }
+
+}

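A minimal sketch, not part of the patch, instantiating the trait directly (e.g. in a test or worksheet) to try a single math expression; the registered data source and function names are arbitrary:

val demo = new BasicParser {
  val dataSourceNames = Seq("source")
  val functionNames = Seq("count")
}
val res = demo.parseAll(demo.mathExpression, "source.age + 2 * 5")
if (res.successful) println(res.get.desc)   // expected: source.age + 2 * 5
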
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/parser/GriffinDslParser.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/parser/GriffinDslParser.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/parser/GriffinDslParser.scala
new file mode 100644
index 0000000..637decb
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/rule/dsl/parser/GriffinDslParser.scala
@@ -0,0 +1,50 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.rule.dsl.parser
+
+import org.apache.griffin.measure.rule.dsl._
+import org.apache.griffin.measure.rule.dsl.expr._
+
+case class GriffinDslParser(dataSourceNames: Seq[String], functionNames: Seq[String]
+                           ) extends BasicParser {
+
+  /**
+    * -- profiling clauses --
+    * <profiling-clauses> = <select-clause> [ <where-clause> ]? [ <groupby-clause> ]? [ <orderby-clause> ]? [ <limit-clause> ]?
+    */
+
+  def profilingClause: Parser[ProfilingClause] = selectClause ~ opt(whereClause) ~
+    opt(groupbyClause) ~ opt(orderbyClause) ~ opt(limitClause) ^^ {
+    case sel ~ whereOpt ~ groupbyOpt ~ orderbyOpt ~ limitOpt => {
+      val preClauses = Seq(whereOpt).flatMap(opt => opt)
+      val postClauses = Seq(orderbyOpt, limitOpt).flatMap(opt => opt)
+      ProfilingClause(sel, groupbyOpt, preClauses, postClauses)
+    }
+  }
+
+  def parseRule(rule: String, dqType: DqType): ParseResult[Expr] = {
+    val rootExpr = dqType match {
+      case AccuracyType => logicalExpression
+      case ProfilingType => profilingClause
+      case _ => expression
+    }
+    parseAll(rootExpr, rule)
+  }
+
+}

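A minimal sketch, not part of the patch, of parsing a profiling rule with the concrete parser above; it assumes ProfilingType is imported from org.apache.griffin.measure.rule.dsl and that the clause classes referenced by BasicParser compile as shown elsewhere in this commit:

val parser = GriffinDslParser(Seq("source"), Seq("count"))
val result = parser.parseRule("source.name, count(source.name) group by source.name", ProfilingType)
if (result.successful) println(result.get.desc)
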
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/AnalyzableExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/AnalyzableExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/AnalyzableExpr.scala
deleted file mode 100644
index aefcaad..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/AnalyzableExpr.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-
-trait AnalyzableExpr extends Serializable {
-  def getGroupbyExprPairs(dsPair: (String, String)): Seq[(Expr, Expr)] = Nil
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Cacheable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Cacheable.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Cacheable.scala
deleted file mode 100644
index feb8156..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Cacheable.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-trait Cacheable extends DataSourceable {
-  protected def cacheUnit: Boolean = false
-  def cacheable(ds: String): Boolean = {
-    cacheUnit && !conflict() && ((ds.isEmpty && dataSources.isEmpty) || (ds.nonEmpty && contains(ds)))
-  }
-  protected def getCacheExprs(ds: String): Iterable[Cacheable]
-
-  protected def persistUnit: Boolean = false
-  def persistable(ds: String): Boolean = {
-    persistUnit && ((ds.isEmpty && dataSources.isEmpty) || (ds.nonEmpty && contains(ds)))
-  }
-  protected def getPersistExprs(ds: String): Iterable[Cacheable]
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Calculatable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Calculatable.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Calculatable.scala
deleted file mode 100644
index 904e823..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Calculatable.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-trait Calculatable extends Serializable {
-
-  def calculate(values: Map[String, Any]): Option[Any]
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ClauseExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ClauseExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ClauseExpr.scala
deleted file mode 100644
index a56e0db..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ClauseExpr.scala
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-
-trait ClauseExpr extends Expr with AnalyzableExpr {
-  def valid(values: Map[String, Any]): Boolean = true
-  override def cacheUnit: Boolean = true
-}
-
-case class WhereClauseExpr(expr: LogicalExpr) extends ClauseExpr {
-  def calculateOnly(values: Map[String, Any]): Option[Any] = expr.calculate(values)
-  val desc: String = expr.desc
-  val dataSources: Set[String] = expr.dataSources
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    expr.getCacheExprs(ds)
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    expr.getFinalCacheExprs(ds)
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    expr.getPersistExprs(ds)
-  }
-
-  override def getGroupbyExprPairs(dsPair: (String, String)): Seq[(Expr, Expr)] = expr.getGroupbyExprPairs(dsPair)
-}
-
-case class WhenClauseExpr(expr: LogicalExpr) extends ClauseExpr {
-  def calculateOnly(values: Map[String, Any]): Option[Any] = expr.calculate(values)
-  val desc: String = s"WHEN ${expr.desc}"
-  val dataSources: Set[String] = expr.dataSources
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    expr.getCacheExprs(ds)
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    expr.getFinalCacheExprs(ds)
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    expr.getPersistExprs(ds)
-  }
-
-  override def getGroupbyExprPairs(dsPair: (String, String)): Seq[(Expr, Expr)] = expr.getGroupbyExprPairs(dsPair)
-}
-
-case class StatementExpr(whereClause: WhereClauseExpr, whenClauseOpt: Option[WhenClauseExpr]) extends ClauseExpr {
-  def calculateOnly(values: Map[String, Any]): Option[Any] = whereClause.calculate(values)
-  val desc: String = {
-    whenClauseOpt match {
-      case Some(expr) => s"${whereClause.desc} ${expr.desc}"
-      case _ => whereClause.desc
-    }
-  }
-  val dataSources: Set[String] = whereClause.dataSources
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    whereClause.getCacheExprs(ds)
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    whereClause.getFinalCacheExprs(ds)
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    whereClause.getPersistExprs(ds)
-  }
-
-  override def getGroupbyExprPairs(dsPair: (String, String)): Seq[(Expr, Expr)] = whereClause.getGroupbyExprPairs(dsPair)
-}
-
-//case class WhenClauseStatementExpr(expr: LogicalExpr, whenExpr: LogicalExpr) extends ClauseExpr {
-//  def calculateOnly(values: Map[String, Any]): Option[Any] = expr.calculate(values)
-//  val desc: String = s"${expr.desc} when ${whenExpr.desc}"
-//
-//  override def valid(values: Map[String, Any]): Boolean = {
-//    whenExpr.calculate(values) match {
-//      case Some(r: Boolean) => r
-//      case _ => false
-//    }
-//  }
-//
-//  val dataSources: Set[String] = expr.dataSources ++ whenExpr.dataSources
-//  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-//    expr.getCacheExprs(ds) ++ whenExpr.getCacheExprs(ds)
-//  }
-//  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-//    expr.getFinalCacheExprs(ds) ++ whenExpr.getFinalCacheExprs(ds)
-//  }
-//  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-//    expr.getPersistExprs(ds) ++ whenExpr.getPersistExprs(ds)
-//  }
-//
-//  override def getGroupbyExprPairs(dsPair: (String, String)): Seq[(Expr, Expr)] = {
-//    expr.getGroupbyExprPairs(dsPair) ++ whenExpr.getGroupbyExprPairs(dsPair)
-//  }
-//  override def getWhenClauseExpr(): Option[LogicalExpr] = Some(whenExpr)
-//}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/DataSourceable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/DataSourceable.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/DataSourceable.scala
deleted file mode 100644
index e2cf172..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/DataSourceable.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-trait DataSourceable extends Serializable {
-  val dataSources: Set[String]
-  protected def conflict(): Boolean = dataSources.size > 1
-  def contains(ds: String): Boolean = dataSources.contains(ds)
-  def dataSourceOpt: Option[String] = {
-    if (dataSources.size == 1) Some(dataSources.head) else None
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Describable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Describable.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Describable.scala
deleted file mode 100644
index 393d7a6..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Describable.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-trait Describable extends Serializable {
-
-  val desc: String
-
-  protected def describe(v: Any): String = {
-    v match {
-      case s: Describable => s"${s.desc}"
-      case s: String => s"'${s}'"
-      case a => s"${a}"
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Expr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Expr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Expr.scala
deleted file mode 100644
index 726b5b6..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/Expr.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-import org.apache.spark.sql.types.DataType
-
-trait Expr extends Serializable with Describable with Cacheable with Calculatable {
-
-  protected val _defaultId: String = ExprIdCounter.emptyId
-
-  val _id = ExprIdCounter.genId(_defaultId)
-
-  protected def getSubCacheExprs(ds: String): Iterable[Expr] = Nil
-  final def getCacheExprs(ds: String): Iterable[Expr] = {
-    if (cacheable(ds)) getSubCacheExprs(ds).toList :+ this else getSubCacheExprs(ds)
-  }
-
-  protected def getSubFinalCacheExprs(ds: String): Iterable[Expr] = Nil
-  final def getFinalCacheExprs(ds: String): Iterable[Expr] = {
-    if (cacheable(ds)) Nil :+ this else getSubFinalCacheExprs(ds)
-  }
-
-  protected def getSubPersistExprs(ds: String): Iterable[Expr] = Nil
-  final def getPersistExprs(ds: String): Iterable[Expr] = {
-    if (persistable(ds)) getSubPersistExprs(ds).toList :+ this else getSubPersistExprs(ds)
-  }
-
-  final def calculate(values: Map[String, Any]): Option[Any] = {
-    values.get(_id) match {
-      case Some(v) => Some(v)
-      case _ => calculateOnly(values)
-    }
-  }
-  protected def calculateOnly(values: Map[String, Any]): Option[Any]
-
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ExprDescOnly.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ExprDescOnly.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ExprDescOnly.scala
deleted file mode 100644
index 01b7e3c..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ExprDescOnly.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-trait ExprDescOnly extends Describable {
-
-}
-
-
-case class SelectionHead(expr: String) extends ExprDescOnly {
-  private val headRegex = """\$(\w+)""".r
-  val head: String = expr match {
-    case headRegex(v) => v.toLowerCase
-    case _ => expr
-  }
-  val desc: String = "$" + head
-}
-
-case class RangeDesc(elements: Iterable[MathExpr]) extends ExprDescOnly {
-  val desc: String = {
-    val rangeDesc = elements.map(_.desc).mkString(", ")
-    s"(${rangeDesc})"
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ExprIdCounter.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ExprIdCounter.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ExprIdCounter.scala
deleted file mode 100644
index ae76aef..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/ExprIdCounter.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-import java.util.concurrent.atomic.AtomicLong
-
-import scala.collection.mutable.{Set => MutableSet}
-
-object ExprIdCounter {
-
-  private val idCounter: AtomicLong = new AtomicLong(0L)
-
-  private val existIdSet: MutableSet[String] = MutableSet.empty[String]
-
-  private val invalidIdRegex = """^\d+$""".r
-
-  val emptyId: String = ""
-
-  def genId(defaultId: String): String = {
-    defaultId match {
-      case emptyId => increment.toString
-      case invalidIdRegex() => increment.toString
-//      case defId if (exist(defId)) => s"${increment}#${defId}"
-      case defId if (exist(defId)) => s"${defId}"
-      case _ => {
-        insertUserId(defaultId)
-        defaultId
-      }
-    }
-  }
-
-  private def exist(id: String): Boolean = {
-    existIdSet.contains(id)
-  }
-
-  private def insertUserId(id: String): Unit = {
-    existIdSet += id
-  }
-
-  private def increment(): Long = {
-    idCounter.incrementAndGet()
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/FieldDescOnly.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/FieldDescOnly.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/FieldDescOnly.scala
deleted file mode 100644
index dca037b..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/FieldDescOnly.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-import scala.util.{Success, Try}
-
-trait FieldDescOnly extends Describable with DataSourceable {
-
-}
-
-case class IndexDesc(expr: String) extends FieldDescOnly {
-  val index: Int = {
-    Try(expr.toInt) match {
-      case Success(v) => v
-      case _ => throw new Exception(s"${expr} is invalid index")
-    }
-  }
-  val desc: String = describe(index)
-  val dataSources: Set[String] = Set.empty[String]
-}
-
-case class FieldDesc(expr: String) extends FieldDescOnly {
-  val field: String = expr
-  val desc: String = describe(field)
-  val dataSources: Set[String] = Set.empty[String]
-}
-
-case class AllFieldsDesc(expr: String) extends FieldDescOnly {
-  val allFields: String = expr
-  val desc: String = allFields
-  val dataSources: Set[String] = Set.empty[String]
-}
-
-case class FieldRangeDesc(startField: FieldDescOnly, endField: FieldDescOnly) extends FieldDescOnly {
-  val desc: String = {
-    (startField, endField) match {
-      case (f1: IndexDesc, f2: IndexDesc) => s"(${f1.desc}, ${f2.desc})"
-      case _ => throw new Exception("invalid field range description")
-    }
-  }
-  val dataSources: Set[String] = Set.empty[String]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/LiteralExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/LiteralExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/LiteralExpr.scala
deleted file mode 100644
index acf1589..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/LiteralExpr.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-import org.apache.griffin.measure.utils.TimeUtil
-import org.apache.spark.sql.types._
-
-import scala.util.{Failure, Success, Try}
-
-trait LiteralExpr extends Expr {
-  val value: Option[Any]
-  def calculateOnly(values: Map[String, Any]): Option[Any] = value
-  val dataSources: Set[String] = Set.empty[String]
-}
-
-case class LiteralValueExpr(value: Option[Any]) extends LiteralExpr {
-  val desc: String = value.getOrElse("").toString
-}
-
-case class LiteralStringExpr(expr: String) extends LiteralExpr {
-  val value: Option[String] = Some(expr)
-  val desc: String = s"'${value.getOrElse("")}'"
-}
-
-case class LiteralNumberExpr(expr: String) extends LiteralExpr {
-  val value: Option[Any] = {
-    if (expr.contains(".")) {
-      Try (expr.toDouble) match {
-        case Success(v) => Some(v)
-        case _ => throw new Exception(s"${expr} is invalid number")
-      }
-    } else {
-      Try (expr.toLong) match {
-        case Success(v) => Some(v)
-        case _ => throw new Exception(s"${expr} is invalid number")
-      }
-    }
-  }
-  val desc: String = value.getOrElse("").toString
-}
-
-case class LiteralTimeExpr(expr: String) extends LiteralExpr {
-  final val TimeRegex = """(\d+)(d|h|m|s|ms)""".r
-  val value: Option[Long] = TimeUtil.milliseconds(expr)
-  val desc: String = expr
-}
-
-case class LiteralBooleanExpr(expr: String) extends LiteralExpr {
-  final val TrueRegex = """(?i)true""".r
-  final val FalseRegex = """(?i)false""".r
-  val value: Option[Boolean] = expr match {
-    case TrueRegex() => Some(true)
-    case FalseRegex() => Some(false)
-    case _ => throw new Exception(s"${expr} is invalid boolean")
-  }
-  val desc: String = value.getOrElse("").toString
-}
-
-case class LiteralNullExpr(expr: String) extends LiteralExpr {
-  val value: Option[Any] = Some(null)
-  val desc: String = "null"
-}
-
-case class LiteralNoneExpr(expr: String) extends LiteralExpr {
-  val value: Option[Any] = None
-  val desc: String = "none"
-}
\ No newline at end of file
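
As context for the literal expressions removed above, a minimal standalone sketch of how LiteralNumberExpr chose between Long and Double parsing; the helper name parseNumber and the sample values are illustrative, not part of the commit:

    import scala.util.{Success, Try}

    def parseNumber(expr: String): Any = {
      // a decimal point selects Double parsing, otherwise Long; anything else is rejected
      val parsed = if (expr.contains(".")) Try(expr.toDouble) else Try(expr.toLong)
      parsed match {
        case Success(v) => v
        case _ => throw new Exception(s"${expr} is invalid number")
      }
    }

    // parseNumber("42")  -> 42L
    // parseNumber("4.2") -> 4.2
    // parseNumber("4a2") -> Exception: 4a2 is invalid number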

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/rule/expr/LogicalExpr.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/LogicalExpr.scala b/measure/src/main/scala/org/apache/griffin/measure/rule/expr/LogicalExpr.scala
deleted file mode 100644
index dd061d7..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/rule/expr/LogicalExpr.scala
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.rule.expr
-
-import org.apache.griffin.measure.rule.CalculationUtil._
-import org.apache.spark.sql.types.{BooleanType, DataType}
-
-trait LogicalExpr extends Expr with AnalyzableExpr {
-  override def cacheUnit: Boolean = true
-}
-
-case class LogicalSimpleExpr(expr: MathExpr) extends LogicalExpr {
-  def calculateOnly(values: Map[String, Any]): Option[Any] = expr.calculate(values)
-  val desc: String = expr.desc
-  val dataSources: Set[String] = expr.dataSources
-  override def cacheUnit: Boolean = false
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = expr.getCacheExprs(ds)
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = expr.getFinalCacheExprs(ds)
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = expr.getPersistExprs(ds)
-}
-
-case class LogicalCompareExpr(left: MathExpr, compare: String, right: MathExpr) extends LogicalExpr {
-  private val (eqOpr, neqOpr, btOpr, bteOpr, ltOpr, lteOpr) = ("""==?""".r, """!==?""".r, ">", ">=", "<", "<=")
-  def calculateOnly(values: Map[String, Any]): Option[Boolean] = {
-    val (lv, rv) = (left.calculate(values), right.calculate(values))
-    compare match {
-      case this.eqOpr() => lv === rv
-      case this.neqOpr() => lv =!= rv
-      case this.btOpr => lv > rv
-      case this.bteOpr => lv >= rv
-      case this.ltOpr => lv < rv
-      case this.lteOpr => lv <= rv
-      case _ => None
-    }
-  }
-  val desc: String = s"${left.desc} ${compare} ${right.desc}"
-  val dataSources: Set[String] = left.dataSources ++ right.dataSources
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    left.getCacheExprs(ds) ++ right.getCacheExprs(ds)
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    left.getFinalCacheExprs(ds) ++ right.getFinalCacheExprs(ds)
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    left.getPersistExprs(ds) ++ right.getPersistExprs(ds)
-  }
-
-  override def getGroupbyExprPairs(dsPair: (String, String)): Seq[(Expr, Expr)] = {
-    if (compare == "=" || compare == "==") {
-      (left.dataSourceOpt, right.dataSourceOpt) match {
-        case (Some(dsPair._1), Some(dsPair._2)) => (left, right) :: Nil
-        case (Some(dsPair._2), Some(dsPair._1)) => (right, left) :: Nil
-        case _ => Nil
-      }
-    } else Nil
-  }
-}
-
-case class LogicalRangeExpr(left: MathExpr, rangeOpr: String, range: RangeDesc) extends LogicalExpr {
-  private val (inOpr, ninOpr, btwnOpr, nbtwnOpr) = ("""(?i)in""".r, """(?i)not\s+in""".r, """(?i)between""".r, """(?i)not\s+between""".r)
-  def calculateOnly(values: Map[String, Any]): Option[Any] = {
-    val (lv, rvs) = (left.calculate(values), range.elements.map(_.calculate(values)))
-    rangeOpr match {
-      case this.inOpr() => lv in rvs
-      case this.ninOpr() => lv not_in rvs
-      case this.btwnOpr() => lv between rvs
-      case this.nbtwnOpr() => lv not_between rvs
-      case _ => None
-    }
-  }
-  val desc: String = s"${left.desc} ${rangeOpr} ${range.desc}"
-  val dataSources: Set[String] = left.dataSources ++ range.elements.flatMap(_.dataSources).toSet
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    left.getCacheExprs(ds) ++ range.elements.flatMap(_.getCacheExprs(ds))
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    left.getFinalCacheExprs(ds) ++ range.elements.flatMap(_.getFinalCacheExprs(ds))
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    left.getPersistExprs(ds) ++ range.elements.flatMap(_.getPersistExprs(ds))
-  }
-}
-
-// -- logical statement --
-//case class LogicalFactorExpr(self: LogicalExpr) extends LogicalExpr {
-//  def calculate(values: Map[String, Any]): Option[Any] = self.calculate(values)
-//  val desc: String = self.desc
-//}
-
-case class UnaryLogicalExpr(oprList: Iterable[String], factor: LogicalExpr) extends LogicalExpr {
-  private val notOpr = """(?i)not|!""".r
-  def calculateOnly(values: Map[String, Any]): Option[Any] = {
-    val fv = factor.calculate(values)
-    oprList.foldRight(fv) { (opr, v) =>
-      opr match {
-        case this.notOpr() => !v
-        case _ => None
-      }
-    }
-  }
-  val desc: String = oprList.foldRight(factor.desc) { (prev, ex) => s"${prev} ${ex}" }
-  val dataSources: Set[String] = factor.dataSources
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    factor.getCacheExprs(ds)
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    factor.getFinalCacheExprs(ds)
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    factor.getPersistExprs(ds)
-  }
-
-  override def getGroupbyExprPairs(dsPair: (String, String)): Seq[(Expr, Expr)] = {
-    val notOprList = oprList.filter { opr =>
-      opr match {
-        case this.notOpr() => true
-        case _ => false
-      }
-    }
-    if (notOprList.size % 2 == 0) factor.getGroupbyExprPairs(dsPair) else Nil
-  }
-}
-
-case class BinaryLogicalExpr(first: LogicalExpr, others: Iterable[(String, LogicalExpr)]) extends LogicalExpr {
-  private val (andOpr, orOpr) = ("""(?i)and|&&""".r, """(?i)or|\|\|""".r)
-  def calculateOnly(values: Map[String, Any]): Option[Any] = {
-    val fv = first.calculate(values)
-    others.foldLeft(fv) { (v, pair) =>
-      val (opr, next) = pair
-      val nv = next.calculate(values)
-      opr match {
-        case this.andOpr() => v && nv
-        case this.orOpr() => v || nv
-        case _ => None
-      }
-    }
-  }
-  val desc: String = others.foldLeft(first.desc) { (ex, next) => s"${ex} ${next._1} ${next._2.desc}" }
-  val dataSources: Set[String] = first.dataSources ++ others.flatMap(_._2.dataSources).toSet
-  override def getSubCacheExprs(ds: String): Iterable[Expr] = {
-    first.getCacheExprs(ds) ++ others.flatMap(_._2.getCacheExprs(ds))
-  }
-  override def getSubFinalCacheExprs(ds: String): Iterable[Expr] = {
-    first.getFinalCacheExprs(ds) ++ others.flatMap(_._2.getFinalCacheExprs(ds))
-  }
-  override def getSubPersistExprs(ds: String): Iterable[Expr] = {
-    first.getPersistExprs(ds) ++ others.flatMap(_._2.getPersistExprs(ds))
-  }
-
-  override def getGroupbyExprPairs(dsPair: (String, String)): Seq[(Expr, Expr)] = {
-    if (others.isEmpty) first.getGroupbyExprPairs(dsPair)
-    else {
-      val isAnd = others.exists(_._1 match {
-        case this.andOpr() => true
-        case _ => false
-      })
-      if (isAnd) {
-        first.getGroupbyExprPairs(dsPair) ++ others.flatMap(_._2.getGroupbyExprPairs(dsPair))
-      } else Nil
-    }
-  }
-}
\ No newline at end of file
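
A small self-contained sketch of the group-by extraction removed above: an equality comparison whose two sides belong to the two configured data sources yields a (source, target) pair, while any other operator yields nothing. SimpleExpr and the sample descriptions are simplified stand-ins for the real Expr types, for illustration only:

    case class SimpleExpr(desc: String, dataSourceOpt: Option[String])

    def groupbyExprPairs(left: SimpleExpr, compare: String, right: SimpleExpr,
                         dsPair: (String, String)): Seq[(SimpleExpr, SimpleExpr)] = {
      if (compare == "=" || compare == "==") {
        (left.dataSourceOpt, right.dataSourceOpt) match {
          case (Some(l), Some(r)) if l == dsPair._1 && r == dsPair._2 => (left, right) :: Nil
          case (Some(l), Some(r)) if l == dsPair._2 && r == dsPair._1 => (right, left) :: Nil
          case _ => Nil
        }
      } else Nil
    }

    val src = SimpleExpr("$source['name']", Some("source"))
    val tgt = SimpleExpr("$target['name']", Some("target"))
    // groupbyExprPairs(src, "=", tgt, ("source", "target")) -> List((src, tgt))
    // groupbyExprPairs(src, ">", tgt, ("source", "target")) -> Nil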



[10/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/config/params/user/EvaluateRuleParam.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/EvaluateRuleParam.scala b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/EvaluateRuleParam.scala
index 6ee9783..2abf3e5 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/EvaluateRuleParam.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/EvaluateRuleParam.scala
@@ -23,8 +23,8 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include
 import org.apache.griffin.measure.config.params.Param
 
 @JsonInclude(Include.NON_NULL)
-case class EvaluateRuleParam( @JsonProperty("sampleRatio") sampleRatio: Double,
-                              @JsonProperty("rules") rules: String
+case class EvaluateRuleParam( @JsonProperty("dsl.type") dslType: String,
+                              @JsonProperty("rules") rules: List[Map[String, Any]]
                             ) extends Param {
 
 }
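
With this change the evaluate rule is no longer a sample ratio plus a single expression string but a dsl type and a list of rule maps. A minimal sketch of constructing the new shape directly; only the case-class fields come from the diff, while the dsl type value and the keys inside the rule map are assumptions for illustration:

    import org.apache.griffin.measure.config.params.user.EvaluateRuleParam

    val rules: List[Map[String, Any]] = List(
      Map("dsl.type" -> "spark-sql", "rule" -> "SELECT COUNT(*) FROM source")  // assumed keys
    )
    val evaluateRule = EvaluateRuleParam("spark-sql", rules)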

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/config/params/user/UserParam.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/UserParam.scala b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/UserParam.scala
index df0647c..e55d2b4 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/config/params/user/UserParam.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/config/params/user/UserParam.scala
@@ -23,12 +23,10 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include
 import org.apache.griffin.measure.config.params.Param
 
 @JsonInclude(Include.NON_NULL)
-case class UserParam(@JsonProperty("name") name: String,
-                     @JsonProperty("type") dqType: String,
-                     @JsonProperty("process.type") procType: String,
-                     @JsonProperty("source") sourceParam: DataConnectorParam,
-                     @JsonProperty("target") targetParam: DataConnectorParam,
-                     @JsonProperty("evaluateRule") evaluateRuleParam: EvaluateRuleParam
+case class UserParam( @JsonProperty("name") name: String,
+                      @JsonProperty("process.type") procType: String,
+                      @JsonProperty("data.sources") dataSources: List[DataSourceParam],
+                      @JsonProperty("evaluateRule") evaluateRuleParam: EvaluateRuleParam
                     ) extends Param {
 
 }
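
The user parameter drops the dq type and the fixed source/target pair in favour of a list of data sources. A minimal sketch of the new shape, assuming an empty data-source list because DataSourceParam's fields are not shown in this hunk; the name and process type values are illustrative:

    import org.apache.griffin.measure.config.params.user.{EvaluateRuleParam, UserParam}

    val user = UserParam(
      name = "accuracy-job",                              // illustrative
      procType = "batch",                                 // illustrative
      dataSources = Nil,                                  // DataSourceParam entries would go here
      evaluateRuleParam = EvaluateRuleParam("spark-sql", Nil)
    )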

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/DataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/DataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/DataConnector.scala
deleted file mode 100644
index 1fb1868..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/DataConnector.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector
-
-import org.apache.griffin.measure.log.Loggable
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.DataFrame
-
-
-trait DataConnector extends Loggable with Serializable {
-
-  def available(): Boolean
-
-  def init(): Unit
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/DataConnectorFactory.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/DataConnectorFactory.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/DataConnectorFactory.scala
deleted file mode 100644
index 670175d..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/DataConnectorFactory.scala
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector
-
-import kafka.serializer.StringDecoder
-import org.apache.griffin.measure.config.params.user._
-import org.apache.griffin.measure.connector.cache._
-import org.apache.griffin.measure.connector.direct._
-import org.apache.griffin.measure.connector.streaming._
-import org.apache.griffin.measure.rule.RuleExprs
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.streaming.StreamingContext
-import org.apache.spark.streaming.dstream.InputDStream
-import org.apache.spark.streaming.kafka.KafkaUtils
-
-import scala.reflect.ClassTag
-import scala.util.Try
-
-object DataConnectorFactory {
-
-  val HiveRegex = """^(?i)hive$""".r
-  val AvroRegex = """^(?i)avro$""".r
-
-  val KafkaRegex = """^(?i)kafka$""".r
-
-  val TextRegex = """^(?i)text$""".r
-
-  def getDirectDataConnector(sqlContext: SQLContext,
-                             ssc: StreamingContext,
-                             dataConnectorParam: DataConnectorParam,
-                             ruleExprs: RuleExprs,
-                             globalFinalCacheMap: Map[String, Any]
-                            ): Try[DirectDataConnector] = {
-    val conType = dataConnectorParam.conType
-    val version = dataConnectorParam.version
-    val config = dataConnectorParam.config
-    Try {
-      conType match {
-        case HiveRegex() => HiveDirectDataConnector(sqlContext, config, ruleExprs, globalFinalCacheMap)
-        case AvroRegex() => AvroDirectDataConnector(sqlContext, config, ruleExprs, globalFinalCacheMap)
-        case KafkaRegex() => {
-          val ksdcTry = getStreamingDataConnector(ssc, dataConnectorParam)
-          val cdcTry = getCacheDataConnector(sqlContext, dataConnectorParam.cache)
-          KafkaCacheDirectDataConnector(ksdcTry, cdcTry, dataConnectorParam, ruleExprs, globalFinalCacheMap)
-        }
-        case _ => throw new Exception("connector creation error!")
-      }
-    }
-  }
-
-  private def getStreamingDataConnector(ssc: StreamingContext,
-                                        dataConnectorParam: DataConnectorParam
-                                       ): Try[StreamingDataConnector] = {
-    val conType = dataConnectorParam.conType
-    val version = dataConnectorParam.version
-    val config = dataConnectorParam.config
-    Try {
-      conType match {
-        case KafkaRegex() => {
-          genKafkaDataConnector(ssc, config)
-        }
-        case _ => throw new Exception("streaming connector creation error!")
-      }
-    }
-  }
-
-  private def getCacheDataConnector(sqlContext: SQLContext,
-                                    dataCacheParam: DataCacheParam
-                                   ): Try[CacheDataConnector] = {
-    if (dataCacheParam == null) {
-      throw new Exception("invalid data cache param!")
-    }
-    val cacheType = dataCacheParam.cacheType
-    Try {
-      cacheType match {
-        case HiveRegex() => HiveCacheDataConnector(sqlContext, dataCacheParam)
-        case TextRegex() => TextCacheDataConnector(sqlContext, dataCacheParam)
-        case _ => throw new Exception("cache connector creation error!")
-      }
-    }
-  }
-
-  private def genKafkaDataConnector(ssc: StreamingContext, config: Map[String, Any]) = {
-    val KeyType = "key.type"
-    val ValueType = "value.type"
-    val keyType = config.getOrElse(KeyType, "java.lang.String").toString
-    val valueType = config.getOrElse(ValueType, "java.lang.String").toString
-//    val KafkaConfig = "kafka.config"
-//    val Topics = "topics"
-//    val kafkaConfig = config.get(KafkaConfig) match {
-//      case Some(map: Map[String, Any]) => map.mapValues(_.toString).map(identity)
-//      case _ => Map[String, String]()
-//    }
-//    val topics = config.getOrElse(Topics, "").toString
-    (getClassTag(keyType), getClassTag(valueType)) match {
-      case (ClassTag(k: Class[String]), ClassTag(v: Class[String])) => {
-        if (ssc == null) throw new Exception("streaming context is null!  ")
-        new KafkaStreamingDataConnector(ssc, config) {
-          type K = String
-          type KD = StringDecoder
-          type V = String
-          type VD = StringDecoder
-          def createDStream(topicSet: Set[String]): InputDStream[(K, V)] = {
-            KafkaUtils.createDirectStream[K, V, KD, VD](ssc, kafkaConfig, topicSet)
-          }
-        }
-      }
-      case _ => {
-        throw new Exception("not supported type kafka data connector")
-      }
-    }
-  }
-
-  private def getClassTag(tp: String): ClassTag[_] = {
-    try {
-      val clazz = Class.forName(tp)
-      ClassTag(clazz)
-    } catch {
-      case e: Throwable => throw e
-    }
-  }
-
-}
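
The factory removed above dispatched purely on anchored, case-insensitive regexes over the configured connector type. A standalone sketch of that dispatch, reusing the same regexes; the returned labels are illustrative:

    val HiveRegex  = """^(?i)hive$""".r
    val AvroRegex  = """^(?i)avro$""".r
    val KafkaRegex = """^(?i)kafka$""".r

    def connectorFamily(conType: String): String = conType match {
      case HiveRegex()  => "hive direct connector"
      case AvroRegex()  => "avro direct connector"
      case KafkaRegex() => "kafka streaming connector + cache connector"
      case _            => throw new Exception("connector creation error!")
    }

    // connectorFamily("HIVE") -> "hive direct connector"
    // connectorFamily("jdbc") -> Exception: connector creation error!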

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/cache/CacheDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/CacheDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/cache/CacheDataConnector.scala
deleted file mode 100644
index 1dfe8e2..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/CacheDataConnector.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.cache
-
-import org.apache.griffin.measure.connector.DataConnector
-import org.apache.spark.rdd.RDD
-
-import scala.util.Try
-
-trait CacheDataConnector extends DataConnector with DataCacheable with DataUpdatable {
-
-  def saveData(rdd: RDD[Map[String, Any]], ms: Long): Unit
-
-  def readData(): Try[RDD[Map[String, Any]]]
-
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/cache/DataCacheable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/DataCacheable.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/cache/DataCacheable.scala
deleted file mode 100644
index 2be87a6..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/DataCacheable.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.cache
-
-import java.util.concurrent.atomic.AtomicLong
-
-import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
-
-trait DataCacheable {
-
-  protected val defCacheInfoPath = PathCounter.genPath
-
-  val cacheInfoPath: String
-  val readyTimeInterval: Long
-  val readyTimeDelay: Long
-
-  def selfCacheInfoPath = s"${TimeInfoCache.infoPath}/${cacheInfoPath}"
-
-  def selfCacheTime = TimeInfoCache.cacheTime(selfCacheInfoPath)
-  def selfLastProcTime = TimeInfoCache.lastProcTime(selfCacheInfoPath)
-  def selfReadyTime = TimeInfoCache.readyTime(selfCacheInfoPath)
-  def selfCleanTime = TimeInfoCache.cleanTime(selfCacheInfoPath)
-
-  protected def submitCacheTime(ms: Long): Unit = {
-    val map = Map[String, String]((selfCacheTime -> ms.toString))
-    InfoCacheInstance.cacheInfo(map)
-  }
-
-  protected def submitReadyTime(ms: Long): Unit = {
-    val curReadyTime = ms - readyTimeDelay
-    if (curReadyTime % readyTimeInterval == 0) {
-      val map = Map[String, String]((selfReadyTime -> curReadyTime.toString))
-      InfoCacheInstance.cacheInfo(map)
-    }
-  }
-
-  protected def submitLastProcTime(ms: Long): Unit = {
-    val map = Map[String, String]((selfLastProcTime -> ms.toString))
-    InfoCacheInstance.cacheInfo(map)
-  }
-
-  protected def submitCleanTime(ms: Long): Unit = {
-    val cleanTime = genCleanTime(ms)
-    val map = Map[String, String]((selfCleanTime -> cleanTime.toString))
-    InfoCacheInstance.cacheInfo(map)
-  }
-
-  protected def genCleanTime(ms: Long): Long = ms
-
-  protected def readCleanTime(): Option[Long] = {
-    val key = selfCleanTime
-    val keys = key :: Nil
-    InfoCacheInstance.readInfo(keys).get(key).flatMap { v =>
-      try {
-        Some(v.toLong)
-      } catch {
-        case _ => None
-      }
-    }
-  }
-
-}
-
-object PathCounter {
-  private val counter: AtomicLong = new AtomicLong(0L)
-  def genPath(): String = s"path_${increment}"
-  private def increment(): Long = {
-    counter.incrementAndGet()
-  }
-}
\ No newline at end of file
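
The ready-time logic removed above only publishes a timestamp when it lands exactly on an interval boundary after subtracting the configured delay. A tiny sketch of that gate with illustrative values:

    def shouldSubmitReadyTime(ms: Long, readyTimeInterval: Long, readyTimeDelay: Long): Boolean = {
      val curReadyTime = ms - readyTimeDelay
      curReadyTime % readyTimeInterval == 0
    }

    // with a 60 s interval and a 10 s delay:
    // shouldSubmitReadyTime(70000L, 60000L, 10000L) -> true   (70000 - 10000 = 60000)
    // shouldSubmitReadyTime(80000L, 60000L, 10000L) -> false  (70000 is not a multiple of 60000)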

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/cache/DataUpdatable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/DataUpdatable.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/cache/DataUpdatable.scala
deleted file mode 100644
index 07c8187..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/DataUpdatable.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.cache
-
-import org.apache.spark.rdd.RDD
-
-trait DataUpdatable {
-
-  def cleanOldData(): Unit = {}
-
-  def updateOldData(t: Long, oldData: Iterable[Map[String, Any]]): Unit = {}
-  def updateAllOldData(oldRdd: RDD[Map[String, Any]]): Unit = {}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/cache/HiveCacheDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/HiveCacheDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/cache/HiveCacheDataConnector.scala
deleted file mode 100644
index e241188..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/HiveCacheDataConnector.scala
+++ /dev/null
@@ -1,351 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.cache
-
-import java.util.concurrent.TimeUnit
-
-import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
-import org.apache.griffin.measure.config.params.user.DataCacheParam
-import org.apache.griffin.measure.result.TimeStampInfo
-import org.apache.griffin.measure.utils.{HdfsFileDumpUtil, HdfsUtil, JsonUtil, TimeUtil}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.hive.HiveContext
-
-import scala.util.{Success, Try}
-
-case class HiveCacheDataConnector(sqlContext: SQLContext, dataCacheParam: DataCacheParam
-                                 ) extends CacheDataConnector {
-
-  if (!sqlContext.isInstanceOf[HiveContext]) {
-    throw new Exception("hive context not prepared!")
-  }
-
-  val config = dataCacheParam.config
-  val InfoPath = "info.path"
-  val cacheInfoPath: String = config.getOrElse(InfoPath, defCacheInfoPath).toString
-
-  val newCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.new")
-  val oldCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.old")
-
-  val timeRangeParam: List[String] = if (dataCacheParam.timeRange != null) dataCacheParam.timeRange else Nil
-  val deltaTimeRange: (Long, Long) = (timeRangeParam ::: List("0", "0")) match {
-    case s :: e :: _ => {
-      val ns = TimeUtil.milliseconds(s) match {
-        case Some(n) if (n < 0) => n
-        case _ => 0
-      }
-      val ne = TimeUtil.milliseconds(e) match {
-        case Some(n) if (n < 0) => n
-        case _ => 0
-      }
-      (ns, ne)
-    }
-    case _ => (0, 0)
-  }
-
-  val Database = "database"
-  val database: String = config.getOrElse(Database, "").toString
-  val TableName = "table.name"
-  val tableName: String = config.get(TableName) match {
-    case Some(s: String) if (s.nonEmpty) => s
-    case _ => throw new Exception("invalid table.name!")
-  }
-  val ParentPath = "parent.path"
-  val parentPath: String = config.get(ParentPath) match {
-    case Some(s: String) => s
-    case _ => throw new Exception("invalid parent.path!")
-  }
-  val tablePath = HdfsUtil.getHdfsFilePath(parentPath, tableName)
-
-  val concreteTableName = if (dbPrefix) s"${database}.${tableName}" else tableName
-
-  val ReadyTimeInterval = "ready.time.interval"
-  val ReadyTimeDelay = "ready.time.delay"
-  val readyTimeInterval: Long = TimeUtil.milliseconds(config.getOrElse(ReadyTimeInterval, "1m").toString).getOrElse(60000L)
-  val readyTimeDelay: Long = TimeUtil.milliseconds(config.getOrElse(ReadyTimeDelay, "1m").toString).getOrElse(60000L)
-
-  val TimeStampColumn: String = TimeStampInfo.key
-  val PayloadColumn: String = "payload"
-
-//  type Schema = (Long, String)
-  val schema: List[(String, String)] = List(
-    (TimeStampColumn, "bigint"),
-    (PayloadColumn, "string")
-  )
-  val schemaName = schema.map(_._1)
-
-//  type Partition = (Long, Long)
-  val partition: List[(String, String, String)] = List(
-    ("hr", "bigint", "hour"),
-    ("min", "bigint", "min")
-  )
-  val partitionName = partition.map(_._1)
-
-  private val fieldSep = """|"""
-  private val rowSep = """\n"""
-  private val rowSepLiteral = "\n"
-
-  private def dbPrefix(): Boolean = {
-    database.nonEmpty && !database.equals("default")
-  }
-
-  private def tableExists(): Boolean = {
-    Try {
-      if (dbPrefix) {
-        sqlContext.tables(database).filter(tableExistsSql).collect.size
-      } else {
-        sqlContext.tables().filter(tableExistsSql).collect.size
-      }
-    } match {
-      case Success(s) => s > 0
-      case _ => false
-    }
-  }
-
-  override def init(): Unit = {
-    try {
-      if (tableExists) {
-        // drop exist table
-        val dropSql = s"""DROP TABLE ${concreteTableName}"""
-        sqlContext.sql(dropSql)
-      }
-
-      val colsSql = schema.map { field =>
-        s"`${field._1}` ${field._2}"
-      }.mkString(", ")
-      val partitionsSql = partition.map { partition =>
-        s"`${partition._1}` ${partition._2}"
-      }.mkString(", ")
-      val sql = s"""CREATE EXTERNAL TABLE IF NOT EXISTS ${concreteTableName}
-                    |(${colsSql}) PARTITIONED BY (${partitionsSql})
-                    |ROW FORMAT DELIMITED
-                    |FIELDS TERMINATED BY '${fieldSep}'
-                    |LINES TERMINATED BY '${rowSep}'
-                    |STORED AS TEXTFILE
-                    |LOCATION '${tablePath}'""".stripMargin
-      sqlContext.sql(sql)
-    } catch {
-      case e: Throwable => throw e
-    }
-  }
-
-  def available(): Boolean = {
-    true
-  }
-
-  private def encode(data: Map[String, Any], ms: Long): Option[List[Any]] = {
-    try {
-      Some(schema.map { field =>
-        val (name, _) = field
-        name match {
-          case TimeStampColumn => ms
-          case PayloadColumn => JsonUtil.toJson(data)
-          case _ => null
-        }
-      })
-    } catch {
-      case _ => None
-    }
-  }
-
-  private def decode(data: List[Any], updateTimeStamp: Boolean): Option[Map[String, Any]] = {
-    val dataMap = schemaName.zip(data).toMap
-    dataMap.get(PayloadColumn) match {
-      case Some(v: String) => {
-        try {
-          val map = JsonUtil.toAnyMap(v)
-          val resMap = if (updateTimeStamp) {
-            dataMap.get(TimeStampColumn) match {
-              case Some(t) => map + (TimeStampColumn -> t)
-              case _ => map
-            }
-          } else map
-          Some(resMap)
-        } catch {
-          case _ => None
-        }
-      }
-      case _ => None
-    }
-  }
-
-  def saveData(rdd: RDD[Map[String, Any]], ms: Long): Unit = {
-    val newCacheLocked = newCacheLock.lock(-1, TimeUnit.SECONDS)
-    if (newCacheLocked) {
-      try {
-        val ptns = getPartition(ms)
-        val ptnsPath = genPartitionHdfsPath(ptns)
-        val dirPath = s"${tablePath}/${ptnsPath}"
-        val fileName = s"${ms}"
-        val filePath = HdfsUtil.getHdfsFilePath(dirPath, fileName)
-
-        // encode data
-        val dataRdd: RDD[List[Any]] = rdd.flatMap(encode(_, ms))
-
-        // save data
-        val recordRdd: RDD[String] = dataRdd.map { dt =>
-          dt.map(_.toString).mkString(fieldSep)
-        }
-
-        val dumped = if (!recordRdd.isEmpty) {
-          HdfsFileDumpUtil.dump(filePath, recordRdd, rowSepLiteral)
-        } else false
-
-        // add partition
-        if (dumped) {
-          val sql = addPartitionSql(concreteTableName, ptns)
-          sqlContext.sql(sql)
-        }
-
-        // submit ms
-        submitCacheTime(ms)
-        submitReadyTime(ms)
-      } catch {
-        case e: Throwable => error(s"save data error: ${e.getMessage}")
-      } finally {
-        newCacheLock.unlock()
-      }
-    }
-  }
-
-  def readData(): Try[RDD[Map[String, Any]]] = Try {
-    val timeRange = TimeInfoCache.getTimeRange
-    submitLastProcTime(timeRange._2)
-
-    val reviseTimeRange = (timeRange._1 + deltaTimeRange._1, timeRange._2 + deltaTimeRange._2)
-    submitCleanTime(reviseTimeRange._1)
-
-    // read directly through partition info
-    val partitionRange = getPartitionRange(reviseTimeRange._1, reviseTimeRange._2)
-    val sql = selectSql(concreteTableName, partitionRange)
-    val df = sqlContext.sql(sql)
-
-    // decode data
-    df.flatMap { row =>
-      val dt = schemaName.map { sn =>
-        row.getAs[Any](sn)
-      }
-      decode(dt, true)
-    }
-  }
-
-  override def cleanOldData(): Unit = {
-    val oldCacheLocked = oldCacheLock.lock(-1, TimeUnit.SECONDS)
-    if (oldCacheLocked) {
-      try {
-        val cleanTime = readCleanTime()
-        cleanTime match {
-          case Some(ct) => {
-            // drop partition
-            val bound = getPartition(ct)
-            val sql = dropPartitionSql(concreteTableName, bound)
-            sqlContext.sql(sql)
-          }
-          case _ => {
-            // do nothing
-          }
-        }
-      } catch {
-        case e: Throwable => error(s"clean old data error: ${e.getMessage}")
-      } finally {
-        oldCacheLock.unlock()
-      }
-    }
-  }
-
-  override def updateOldData(t: Long, oldData: Iterable[Map[String, Any]]): Unit = {
-    // parallel process different time groups, lock is unnecessary
-    val ptns = getPartition(t)
-    val ptnsPath = genPartitionHdfsPath(ptns)
-    val dirPath = s"${tablePath}/${ptnsPath}"
-    val fileName = s"${t}"
-    val filePath = HdfsUtil.getHdfsFilePath(dirPath, fileName)
-
-    try {
-      // remove out time old data
-      HdfsFileDumpUtil.remove(dirPath, fileName, true)
-
-      // save updated old data
-      if (oldData.size > 0) {
-        val recordDatas = oldData.flatMap { dt =>
-          encode(dt, t)
-        }
-        val records: Iterable[String] = recordDatas.map { dt =>
-          dt.map(_.toString).mkString(fieldSep)
-        }
-        val dumped = HdfsFileDumpUtil.dump(filePath, records, rowSepLiteral)
-      }
-    } catch {
-      case e: Throwable => error(s"update old data error: ${e.getMessage}")
-    }
-  }
-
-  override protected def genCleanTime(ms: Long): Long = {
-    val minPartition = partition.last
-    val t1 = TimeUtil.timeToUnit(ms, minPartition._3)
-    val t2 = TimeUtil.timeFromUnit(t1, minPartition._3)
-    t2
-  }
-
-  private def getPartition(ms: Long): List[(String, Any)] = {
-    partition.map { p =>
-      val (name, _, unit) = p
-      val t = TimeUtil.timeToUnit(ms, unit)
-      (name, t)
-    }
-  }
-  private def getPartitionRange(ms1: Long, ms2: Long): List[(String, (Any, Any))] = {
-    partition.map { p =>
-      val (name, _, unit) = p
-      val t1 = TimeUtil.timeToUnit(ms1, unit)
-      val t2 = TimeUtil.timeToUnit(ms2, unit)
-      (name, (t1, t2))
-    }
-  }
-
-  private def genPartitionHdfsPath(partition: List[(String, Any)]): String = {
-    partition.map(prtn => s"${prtn._1}=${prtn._2}").mkString("/")
-  }
-  private def addPartitionSql(tbn: String, partition: List[(String, Any)]): String = {
-    val partitionSql = partition.map(ptn => (s"`${ptn._1}` = ${ptn._2}")).mkString(", ")
-    val sql = s"""ALTER TABLE ${tbn} ADD IF NOT EXISTS PARTITION (${partitionSql})"""
-    sql
-  }
-  private def selectSql(tbn: String, partitionRange: List[(String, (Any, Any))]): String = {
-    val clause = partitionRange.map { pr =>
-      val (name, (r1, r2)) = pr
-      s"""`${name}` BETWEEN '${r1}' and '${r2}'"""
-    }.mkString(" AND ")
-    val whereClause = if (clause.nonEmpty) s"WHERE ${clause}" else ""
-    val sql = s"""SELECT * FROM ${tbn} ${whereClause}"""
-    sql
-  }
-  private def dropPartitionSql(tbn: String, partition: List[(String, Any)]): String = {
-    val partitionSql = partition.map(ptn => (s"PARTITION ( `${ptn._1}` < '${ptn._2}' ) ")).mkString(", ")
-    val sql = s"""ALTER TABLE ${tbn} DROP ${partitionSql}"""
-    println(sql)
-    sql
-  }
-
-  private def tableExistsSql(): String = {
-    s"tableName LIKE '${tableName}'"
-  }
-
-}
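
The Hive cache connector removed above maintained its cache through plain Hive DDL. A standalone sketch of the ADD PARTITION statement it generated, using the same string template with illustrative partition values:

    def addPartitionSql(tbn: String, partition: List[(String, Any)]): String = {
      val partitionSql = partition.map(ptn => s"`${ptn._1}` = ${ptn._2}").mkString(", ")
      s"ALTER TABLE ${tbn} ADD IF NOT EXISTS PARTITION (${partitionSql})"
    }

    // addPartitionSql("mydb.cache_table", List(("hr", 416700), ("min", 25002001)))
    // -> ALTER TABLE mydb.cache_table ADD IF NOT EXISTS PARTITION (`hr` = 416700, `min` = 25002001)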

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/cache/TextCacheDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/TextCacheDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/cache/TextCacheDataConnector.scala
deleted file mode 100644
index 62b6086..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/cache/TextCacheDataConnector.scala
+++ /dev/null
@@ -1,311 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.cache
-
-import java.util.concurrent.TimeUnit
-
-import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
-import org.apache.griffin.measure.config.params.user.DataCacheParam
-import org.apache.griffin.measure.result.TimeStampInfo
-import org.apache.griffin.measure.utils.{HdfsFileDumpUtil, HdfsUtil, JsonUtil, TimeUtil}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.SQLContext
-
-import scala.util.Try
-
-case class TextCacheDataConnector(sqlContext: SQLContext, dataCacheParam: DataCacheParam
-                                 ) extends CacheDataConnector {
-
-  val config = dataCacheParam.config
-  val InfoPath = "info.path"
-  val cacheInfoPath: String = config.getOrElse(InfoPath, defCacheInfoPath).toString
-
-  val newCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.new")
-  val oldCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.old")
-
-  val timeRangeParam: List[String] = if (dataCacheParam.timeRange != null) dataCacheParam.timeRange else Nil
-  val deltaTimeRange: (Long, Long) = (timeRangeParam ::: List("0", "0")) match {
-    case s :: e :: _ => {
-      val ns = TimeUtil.milliseconds(s) match {
-        case Some(n) if (n < 0) => n
-        case _ => 0
-      }
-      val ne = TimeUtil.milliseconds(e) match {
-        case Some(n) if (n < 0) => n
-        case _ => 0
-      }
-      (ns, ne)
-    }
-    case _ => (0, 0)
-  }
-
-  val FilePath = "file.path"
-  val filePath: String = config.get(FilePath) match {
-    case Some(s: String) => s
-    case _ => throw new Exception("invalid file.path!")
-  }
-
-  val ReadyTimeInterval = "ready.time.interval"
-  val ReadyTimeDelay = "ready.time.delay"
-  val readyTimeInterval: Long = TimeUtil.milliseconds(config.getOrElse(ReadyTimeInterval, "1m").toString).getOrElse(60000L)
-  val readyTimeDelay: Long = TimeUtil.milliseconds(config.getOrElse(ReadyTimeDelay, "1m").toString).getOrElse(60000L)
-
-//  val TimeStampColumn: String = TimeStampInfo.key
-//  val PayloadColumn: String = "payload"
-
-  // cache schema: Long, String
-//  val fields = List[StructField](
-//    StructField(TimeStampColumn, LongType),
-//    StructField(PayloadColumn, StringType)
-//  )
-//  val schema = StructType(fields)
-
-  //  case class CacheData(time: Long, payload: String) {
-  //    def getTime(): Long = time
-  //    def getPayload(): String = payload
-  //  }
-
-  private val rowSepLiteral = "\n"
-
-  val partitionUnits: List[String] = List("hour", "min")
-
-  override def init(): Unit = {
-    // do nothing
-  }
-
-  def available(): Boolean = {
-    true
-  }
-
-  private def encode(data: Map[String, Any], ms: Long): Option[String] = {
-    try {
-      val map = data + (TimeStampInfo.key -> ms)
-      Some(JsonUtil.toJson(map))
-    } catch {
-      case _: Throwable => None
-    }
-  }
-
-  private def decode(data: String): Option[Map[String, Any]] = {
-    try {
-      Some(JsonUtil.toAnyMap(data))
-    } catch {
-      case _: Throwable => None
-    }
-  }
-
-  def saveData(rdd: RDD[Map[String, Any]], ms: Long): Unit = {
-    val newCacheLocked = newCacheLock.lock(-1, TimeUnit.SECONDS)
-    if (newCacheLocked) {
-      try {
-        val ptns = getPartition(ms)
-        val ptnsPath = genPartitionHdfsPath(ptns)
-        val dirPath = s"${filePath}/${ptnsPath}"
-        val dataFileName = s"${ms}"
-        val dataFilePath = HdfsUtil.getHdfsFilePath(dirPath, dataFileName)
-
-        // encode data
-        val dataRdd: RDD[String] = rdd.flatMap(encode(_, ms))
-
-        // save data
-        val dumped = if (!dataRdd.isEmpty) {
-          HdfsFileDumpUtil.dump(dataFilePath, dataRdd, rowSepLiteral)
-        } else false
-
-        // submit ms
-        submitCacheTime(ms)
-        submitReadyTime(ms)
-      } catch {
-        case e: Throwable => error(s"save data error: ${e.getMessage}")
-      } finally {
-        newCacheLock.unlock()
-      }
-    }
-  }
-
-  def readData(): Try[RDD[Map[String, Any]]] = Try {
-    val timeRange = TimeInfoCache.getTimeRange
-    submitLastProcTime(timeRange._2)
-
-    val reviseTimeRange = (timeRange._1 + deltaTimeRange._1, timeRange._2 + deltaTimeRange._2)
-    submitCleanTime(reviseTimeRange._1)
-
-    // read directly through partition info
-    val partitionRanges = getPartitionRange(reviseTimeRange._1, reviseTimeRange._2)
-    println(s"read time ranges: ${reviseTimeRange}")
-    println(s"read partition ranges: ${partitionRanges}")
-
-    // list partition paths
-    val partitionPaths = listPathsBetweenRanges(filePath :: Nil, partitionRanges)
-
-    if (partitionPaths.isEmpty) {
-      sqlContext.sparkContext.emptyRDD[Map[String, Any]]
-    } else {
-      val filePaths = partitionPaths.mkString(",")
-      val rdd = sqlContext.sparkContext.textFile(filePaths)
-
-      // decode data
-      rdd.flatMap { row =>
-        decode(row)
-      }
-    }
-  }
-
-  override def cleanOldData(): Unit = {
-    val oldCacheLocked = oldCacheLock.lock(-1, TimeUnit.SECONDS)
-    if (oldCacheLocked) {
-      try {
-        val cleanTime = readCleanTime()
-        cleanTime match {
-          case Some(ct) => {
-            // drop partitions
-            val bounds = getPartition(ct)
-
-            // list partition paths
-            val earlierPaths = listPathsEarlierThanBounds(filePath :: Nil, bounds)
-
-            // delete out time data path
-            earlierPaths.foreach { path =>
-              println(s"delete hdfs path: ${path}")
-              HdfsUtil.deleteHdfsPath(path)
-            }
-          }
-          case _ => {
-            // do nothing
-          }
-        }
-      } catch {
-        case e: Throwable => error(s"clean old data error: ${e.getMessage}")
-      } finally {
-        oldCacheLock.unlock()
-      }
-    }
-  }
-
-  override def updateOldData(t: Long, oldData: Iterable[Map[String, Any]]): Unit = {
-    // parallel process different time groups, lock is unnecessary
-    val ptns = getPartition(t)
-    val ptnsPath = genPartitionHdfsPath(ptns)
-    val dirPath = s"${filePath}/${ptnsPath}"
-    val dataFileName = s"${t}"
-    val dataFilePath = HdfsUtil.getHdfsFilePath(dirPath, dataFileName)
-
-    try {
-      // remove out time old data
-      HdfsFileDumpUtil.remove(dirPath, dataFileName, true)
-
-      // save updated old data
-      if (oldData.size > 0) {
-        val recordDatas = oldData.flatMap { dt =>
-          encode(dt, t)
-        }
-        val dumped = HdfsFileDumpUtil.dump(dataFilePath, recordDatas, rowSepLiteral)
-      }
-    } catch {
-      case e: Throwable => error(s"update old data error: ${e.getMessage}")
-    }
-  }
-
-  override protected def genCleanTime(ms: Long): Long = {
-    val minPartitionUnit = partitionUnits.last
-    val t1 = TimeUtil.timeToUnit(ms, minPartitionUnit)
-    val t2 = TimeUtil.timeFromUnit(t1, minPartitionUnit)
-    t2
-  }
-
-  private def getPartition(ms: Long): List[Long] = {
-    partitionUnits.map { unit =>
-      TimeUtil.timeToUnit(ms, unit)
-    }
-  }
-  private def getPartitionRange(ms1: Long, ms2: Long): List[(Long, Long)] = {
-    partitionUnits.map { unit =>
-      val t1 = TimeUtil.timeToUnit(ms1, unit)
-      val t2 = TimeUtil.timeToUnit(ms2, unit)
-      (t1, t2)
-    }
-  }
-
-  private def genPartitionHdfsPath(partition: List[Long]): String = {
-    partition.map(prtn => s"${prtn}").mkString("/")
-  }
-
-  private def str2Long(str: String): Option[Long] = {
-    try {
-      Some(str.toLong)
-    } catch {
-      case e: Throwable => None
-    }
-  }
-
-  // here the range means [min, max], but the best range should be (min, max]
-  private def listPathsBetweenRanges(paths: List[String],
-                                     partitionRanges: List[(Long, Long)]
-                                    ): List[String] = {
-    partitionRanges match {
-      case Nil => paths
-      case head :: tail => {
-        val (lb, ub) = head
-        val curPaths = paths.flatMap { path =>
-          val names = HdfsUtil.listSubPaths(path, "dir").toList
-          names.filter { name =>
-            str2Long(name) match {
-              case Some(t) => (t >= lb) && (t <= ub)
-              case _ => false
-            }
-          }.map(HdfsUtil.getHdfsFilePath(path, _))
-        }
-        listPathsBetweenRanges(curPaths, tail)
-      }
-    }
-  }
-
-  private def listPathsEarlierThanBounds(paths: List[String], bounds: List[Long]
-                                        ): List[String] = {
-    bounds match {
-      case Nil => paths
-      case head :: tail => {
-        val earlierPaths = paths.flatMap { path =>
-          val names = HdfsUtil.listSubPaths(path, "dir").toList
-          names.filter { name =>
-            str2Long(name) match {
-              case Some(t) => (t < head)
-              case _ => false
-            }
-          }.map(HdfsUtil.getHdfsFilePath(path, _))
-        }
-        val equalPaths = paths.flatMap { path =>
-          val names = HdfsUtil.listSubPaths(path, "dir").toList
-          names.filter { name =>
-            str2Long(name) match {
-              case Some(t) => (t == head)
-              case _ => false
-            }
-          }.map(HdfsUtil.getHdfsFilePath(path, _))
-        }
-
-        tail match {
-          case Nil => earlierPaths
-          case _ => earlierPaths ::: listPathsEarlierThanBounds(equalPaths, tail)
-        }
-      }
-    }
-  }
-
-}
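
The text cache connector removed above resolves the partition directories to read by recursing one partition unit at a time. A runnable sketch of that walk, assuming an in-memory directory map in place of HdfsUtil; paths and values are illustrative:

    val dirs: Map[String, List[String]] = Map(          // fake HDFS listing: parent -> sub-dirs
      "/cache"        -> List("416700", "416701"),
      "/cache/416700" -> List("25002000", "25002001"),
      "/cache/416701" -> List("25002060")
    )

    def listPathsBetweenRanges(paths: List[String],
                               partitionRanges: List[(Long, Long)]): List[String] = {
      partitionRanges match {
        case Nil => paths
        case (lb, ub) :: tail =>
          val curPaths = paths.flatMap { path =>
            dirs.getOrElse(path, Nil)
              .filter(name => name.toLong >= lb && name.toLong <= ub)
              .map(name => s"${path}/${name}")
          }
          listPathsBetweenRanges(curPaths, tail)
      }
    }

    // listPathsBetweenRanges(List("/cache"), List((416700L, 416701L), (25002001L, 25002060L)))
    // -> List(/cache/416700/25002001, /cache/416701/25002060)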

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/direct/AvroDirectDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/AvroDirectDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/direct/AvroDirectDataConnector.scala
deleted file mode 100644
index b45e5a9..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/AvroDirectDataConnector.scala
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.direct
-
-import org.apache.griffin.measure.result._
-import org.apache.griffin.measure.rule.{ExprValueUtil, RuleExprs}
-import org.apache.griffin.measure.utils.HdfsUtil
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.SQLContext
-
-import scala.util.Try
-
-// data connector for avro file
-case class AvroDirectDataConnector(sqlContext: SQLContext, config: Map[String, Any],
-                                   ruleExprs: RuleExprs, constFinalExprValueMap: Map[String, Any]
-                                 ) extends DirectDataConnector {
-
-  val FilePath = "file.path"
-  val FileName = "file.name"
-
-  val filePath = config.getOrElse(FilePath, "").toString
-  val fileName = config.getOrElse(FileName, "").toString
-
-  val concreteFileFullPath = if (pathPrefix) s"${filePath}${fileName}" else fileName
-
-  private def pathPrefix(): Boolean = {
-    filePath.nonEmpty
-  }
-
-  private def fileExist(): Boolean = {
-    HdfsUtil.existPath(concreteFileFullPath)
-  }
-
-  def available(): Boolean = {
-    (!concreteFileFullPath.isEmpty) && fileExist
-  }
-
-  def init(): Unit = {}
-
-  def metaData(): Try[Iterable[(String, String)]] = {
-    Try {
-      val st = sqlContext.read.format("com.databricks.spark.avro").load(concreteFileFullPath).schema
-      st.fields.map(f => (f.name, f.dataType.typeName))
-    }
-  }
-
-  def data(): Try[RDD[(Product, (Map[String, Any], Map[String, Any]))]] = {
-    Try {
-      loadDataFile.flatMap { row =>
-        // generate cache data
-        val cacheExprValueMaps = ExprValueUtil.genExprValueMaps(Some(row), ruleExprs.cacheExprs, constFinalExprValueMap)
-        val finalExprValueMaps = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMaps)
-
-        // data info
-        val dataInfoMap: Map[String, Any] = DataInfo.cacheInfoList.map { info =>
-          try {
-            (info.key -> row.getAs[info.T](info.key))
-          } catch {
-            case e: Throwable => info.defWrap
-          }
-        }.toMap
-
-        finalExprValueMaps.flatMap { finalExprValueMap =>
-          val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
-            expr.calculate(finalExprValueMap) match {
-              case Some(v) => Some(v.asInstanceOf[AnyRef])
-              case _ => None
-            }
-          }
-          val key = toTuple(groupbyData)
-
-          Some((key, (finalExprValueMap, dataInfoMap)))
-        }
-
-//        val cacheExprValueMap: Map[String, Any] = ruleExprs.cacheExprs.foldLeft(constFinalExprValueMap) { (cachedMap, expr) =>
-//          ExprValueUtil.genExprValueMaps(Some(row), expr, cachedMap)
-//        }
-//        val finalExprValueMap = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMap)
-
-        // when clause filter data source
-//        val whenResult = ruleExprs.whenClauseExprOpt match {
-//          case Some(whenClause) => whenClause.calculate(finalExprValueMap)
-//          case _ => None
-//        }
-//
-//        // get groupby data
-//        whenResult match {
-//          case Some(false) => None
-//          case _ => {
-//            val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
-//              expr.calculate(finalExprValueMap) match {
-//                case Some(v) => Some(v.asInstanceOf[AnyRef])
-//                case _ => None
-//              }
-//            }
-//            val key = toTuple(groupbyData)
-//
-//            Some((key, finalExprValueMap))
-//          }
-//        }
-      }
-    }
-  }
-
-  private def loadDataFile() = {
-    sqlContext.read.format("com.databricks.spark.avro").load(concreteFileFullPath)
-  }
-
-  private def toTuple[A <: AnyRef](as: Seq[A]): Product = {
-    if (as.size > 0) {
-      val tupleClass = Class.forName("scala.Tuple" + as.size)
-      tupleClass.getConstructors.apply(0).newInstance(as: _*).asInstanceOf[Product]
-    } else None
-  }
-
-}
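
One detail worth noting in the connector removed above is the reflective tuple construction used for the group-by key; the same helper works standalone for key lengths 1 through 22, with an empty key collapsing to None:

    def toTuple[A <: AnyRef](as: Seq[A]): Product = {
      if (as.size > 0) {
        // look up scala.TupleN for the key length and build it via its only constructor
        val tupleClass = Class.forName("scala.Tuple" + as.size)
        tupleClass.getConstructors.apply(0).newInstance(as: _*).asInstanceOf[Product]
      } else None
    }

    // toTuple(Seq("name", "age")) -> ("name", "age")   (a scala.Tuple2 built via reflection)
    // toTuple(Seq.empty[AnyRef])  -> None              (empty group-by key)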

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/direct/DirectDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/DirectDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/direct/DirectDataConnector.scala
deleted file mode 100644
index ac1a792..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/DirectDataConnector.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.direct
-
-import org.apache.griffin.measure.connector.DataConnector
-import org.apache.griffin.measure.connector.cache.DataUpdatable
-import org.apache.spark.rdd.RDD
-
-import scala.util.Try
-
-
-trait DirectDataConnector extends DataConnector with DataUpdatable {
-
-  def metaData(): Try[Iterable[(String, String)]]
-
-  def data(): Try[RDD[(Product, (Map[String, Any], Map[String, Any]))]]
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/direct/HiveDirectDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/HiveDirectDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/direct/HiveDirectDataConnector.scala
deleted file mode 100644
index 7de2b02..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/HiveDirectDataConnector.scala
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.direct
-
-import org.apache.griffin.measure.result._
-import org.apache.griffin.measure.rule.{ExprValueUtil, RuleExprs}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.SQLContext
-
-import scala.util.{Success, Try}
-
-// data connector for hive
-case class HiveDirectDataConnector(sqlContext: SQLContext, config: Map[String, Any],
-                                   ruleExprs: RuleExprs, constFinalExprValueMap: Map[String, Any]
-                                 ) extends DirectDataConnector {
-
-  val Database = "database"
-  val TableName = "table.name"
-  val Partitions = "partitions"
-
-  val database = config.getOrElse(Database, "").toString
-  val tableName = config.getOrElse(TableName, "").toString
-  val partitionsString = config.getOrElse(Partitions, "").toString
-
-  val concreteTableName = if (dbPrefix) s"${database}.${tableName}" else tableName
-  val partitions = partitionsString.split(";").map(s => s.split(",").map(_.trim))
-
-  private def dbPrefix(): Boolean = {
-    database.nonEmpty && !database.equals("default")
-  }
-
-  def available(): Boolean = {
-    (!tableName.isEmpty) && {
-      Try {
-        if (dbPrefix) {
-          sqlContext.tables(database).filter(tableExistsSql).collect.size
-        } else {
-          sqlContext.tables().filter(tableExistsSql).collect.size
-        }
-      } match {
-        case Success(s) => s > 0
-        case _ => false
-      }
-    }
-  }
-
-  def init(): Unit = {}
-
-  def metaData(): Try[Iterable[(String, String)]] = {
-    Try {
-      val originRows = sqlContext.sql(metaDataSql).map(r => (r.getString(0), r.getString(1))).collect
-      val partitionPos: Int = originRows.indexWhere(pair => pair._1.startsWith("# "))
-      if (partitionPos < 0) originRows
-      else originRows.take(partitionPos)
-    }
-  }
-
-  def data(): Try[RDD[(Product, (Map[String, Any], Map[String, Any]))]] = {
-    Try {
-      sqlContext.sql(dataSql).flatMap { row =>
-        // generate cache data
-        val cacheExprValueMaps = ExprValueUtil.genExprValueMaps(Some(row), ruleExprs.cacheExprs, constFinalExprValueMap)
-        val finalExprValueMaps = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMaps)
-
-        // data info
-        val dataInfoMap: Map[String, Any] = DataInfo.cacheInfoList.map { info =>
-          try {
-            (info.key -> row.getAs[info.T](info.key))
-          } catch {
-            case e: Throwable => info.defWrap
-          }
-        }.toMap
-
-        finalExprValueMaps.flatMap { finalExprValueMap =>
-          val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
-            expr.calculate(finalExprValueMap) match {
-              case Some(v) => Some(v.asInstanceOf[AnyRef])
-              case _ => None
-            }
-          }
-          val key = toTuple(groupbyData)
-
-          Some((key, (finalExprValueMap, dataInfoMap)))
-        }
-
-        // generate cache data
-//        val cacheExprValueMap: Map[String, Any] = ruleExprs.cacheExprs.foldLeft(constFinalExprValueMap) { (cachedMap, expr) =>
-//          ExprValueUtil.genExprValueMap(Some(row), expr, cachedMap)
-//        }
-//        val finalExprValueMap = ExprValueUtil.updateExprValueMap(ruleExprs.finalCacheExprs, cacheExprValueMap)
-//
-//        // when clause filter data source
-//        val whenResult = ruleExprs.whenClauseExprOpt match {
-//          case Some(whenClause) => whenClause.calculate(finalExprValueMap)
-//          case _ => None
-//        }
-//
-//        // get groupby data
-//        whenResult match {
-//          case Some(false) => None
-//          case _ => {
-//            val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
-//              expr.calculate(finalExprValueMap) match {
-//                case Some(v) => Some(v.asInstanceOf[AnyRef])
-//                case _ => None
-//              }
-//            }
-//            val key = toTuple(groupbyData)
-//
-//            Some((key, finalExprValueMap))
-//          }
-//        }
-      }
-    }
-  }
-
-  private def tableExistsSql(): String = {
-//    s"SHOW TABLES LIKE '${concreteTableName}'"    // this is hive sql, but it does not work for spark sql
-    s"tableName LIKE '${tableName}'"
-  }
-
-  private def metaDataSql(): String = {
-    s"DESCRIBE ${concreteTableName}"
-  }
-
-  private def dataSql(): String = {
-    val clauses = partitions.map { prtn =>
-      val cls = prtn.mkString(" AND ")
-      if (cls.isEmpty) s"SELECT * FROM ${concreteTableName}"
-      else s"SELECT * FROM ${concreteTableName} WHERE ${cls}"
-    }
-    clauses.mkString(" UNION ALL ")
-  }
-
-  private def toTuple[A <: AnyRef](as: Seq[A]): Product = {
-    if (as.size > 0) {
-      val tupleClass = Class.forName("scala.Tuple" + as.size)
-      tupleClass.getConstructors.apply(0).newInstance(as: _*).asInstanceOf[Product]
-    } else None
-  }
-
-}
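
The available() check above probes the metastore through sqlContext.tables instead of
Hive's SHOW TABLES LIKE, which Spark SQL of this vintage does not accept. A minimal
sketch of the same probe, not part of this patch, assuming an SQLContext backed by Hive
and purely illustrative database/table names:

import org.apache.spark.sql.SQLContext

// sqlContext.tables(db) yields a DataFrame with tableName / isTemporary columns,
// so a SQL-style filter on tableName stands in for SHOW TABLES LIKE.
def tableExists(sqlContext: SQLContext, db: String, table: String): Boolean = {
  sqlContext.tables(db).filter(s"tableName LIKE '$table'").collect().nonEmpty
}

// tableExists(sqlContext, "demo_db", "demo_tbl")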

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/direct/KafkaCacheDirectDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/KafkaCacheDirectDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/direct/KafkaCacheDirectDataConnector.scala
deleted file mode 100644
index d2534cc..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/KafkaCacheDirectDataConnector.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.direct
-
-import org.apache.griffin.measure.config.params.user.DataConnectorParam
-import org.apache.griffin.measure.connector.DataConnectorFactory
-import org.apache.griffin.measure.connector.cache.CacheDataConnector
-import org.apache.griffin.measure.connector.streaming.StreamingDataConnector
-import org.apache.griffin.measure.result._
-import org.apache.griffin.measure.rule._
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.streaming.StreamingContext
-
-import scala.util.{Failure, Success, Try}
-
-case class KafkaCacheDirectDataConnector(@transient streamingDataConnectorTry: Try[StreamingDataConnector],
-                                         cacheDataConnectorTry: Try[CacheDataConnector],
-                                         dataConnectorParam: DataConnectorParam,
-                                         ruleExprs: RuleExprs,
-                                         constFinalExprValueMap: Map[String, Any]
-                                        ) extends StreamingCacheDirectDataConnector {
-
-  val cacheDataConnector: CacheDataConnector = cacheDataConnectorTry match {
-    case Success(cntr) => cntr
-    case Failure(ex) => throw ex
-  }
-  @transient val streamingDataConnector: StreamingDataConnector = streamingDataConnectorTry match {
-    case Success(cntr) => cntr
-    case Failure(ex) => throw ex
-  }
-
-  protected def transform(rdd: RDD[(streamingDataConnector.K, streamingDataConnector.V)],
-                          ms: Long
-                         ): RDD[Map[String, Any]] = {
-    val dataInfoMap = DataInfo.cacheInfoList.map(_.defWrap).toMap + TimeStampInfo.wrap(ms)
-
-    rdd.flatMap { kv =>
-      val msg = kv._2
-
-      val cacheExprValueMaps = ExprValueUtil.genExprValueMaps(Some(msg), ruleExprs.cacheExprs, constFinalExprValueMap)
-      val finalExprValueMaps = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMaps)
-
-      finalExprValueMaps.map { vm =>
-        vm ++ dataInfoMap
-      }
-    }
-  }
-
-  def metaData(): Try[Iterable[(String, String)]] = Try {
-    Map.empty[String, String]
-  }
-
-  def data(): Try[RDD[(Product, (Map[String, Any], Map[String, Any]))]] = Try {
-    cacheDataConnector.readData match {
-      case Success(rdd) => {
-        rdd.flatMap { row =>
-          val finalExprValueMap = ruleExprs.finalCacheExprs.flatMap { expr =>
-            row.get(expr._id).flatMap { d =>
-              Some((expr._id, d))
-            }
-          }.toMap
-
-          val dataInfoMap: Map[String, Any] = DataInfo.cacheInfoList.map { info =>
-            row.get(info.key) match {
-              case Some(d) => (info.key -> d)
-              case _ => info.defWrap
-            }
-          }.toMap
-
-          val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
-            expr.calculate(finalExprValueMap) match {
-              case Some(v) => Some(v.asInstanceOf[AnyRef])
-              case _ => None
-            }
-          }
-          val key = toTuple(groupbyData)
-
-          Some((key, (finalExprValueMap, dataInfoMap)))
-        }
-      }
-      case Failure(ex) => throw ex
-    }
-  }
-
-  override def cleanOldData(): Unit = {
-    cacheDataConnector.cleanOldData
-  }
-
-  override def updateOldData(t: Long, oldData: Iterable[Map[String, Any]]): Unit = {
-    if (dataConnectorParam.getMatchOnce) {
-      cacheDataConnector.updateOldData(t, oldData)
-    }
-  }
-
-  override def updateAllOldData(oldRdd: RDD[Map[String, Any]]): Unit = {
-    if (dataConnectorParam.getMatchOnce) {
-      cacheDataConnector.updateAllOldData(oldRdd)
-    }
-  }
-
-  private def toTuple[A <: AnyRef](as: Seq[A]): Product = {
-    if (as.size > 0) {
-      val tupleClass = Class.forName("scala.Tuple" + as.size)
-      tupleClass.getConstructors.apply(0).newInstance(as: _*).asInstanceOf[Product]
-    } else None
-  }
-
-}
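
The two Try-typed constructor parameters above are unwrapped eagerly, so a broken
sub-connector fails the whole connector at construction time rather than at first use.
A small sketch of that fail-fast unwrapping, equivalent to Try.get but spelled out the
way this class does it:

import scala.util.{Failure, Success, Try}

// Return the wrapped value or rethrow the captured failure immediately.
def unwrapOrThrow[T](t: Try[T]): T = t match {
  case Success(v)  => v
  case Failure(ex) => throw ex
}

Keeping the parameters as Try values lets the factory defer the failure until the
composite connector is actually built.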

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/direct/StreamingCacheDirectDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/StreamingCacheDirectDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/direct/StreamingCacheDirectDataConnector.scala
deleted file mode 100644
index 87139d6..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/direct/StreamingCacheDirectDataConnector.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.direct
-
-import org.apache.griffin.measure.connector.cache.CacheDataConnector
-import org.apache.griffin.measure.connector.streaming.StreamingDataConnector
-import org.apache.griffin.measure.result.{DataInfo, TimeStampInfo}
-import org.apache.griffin.measure.rule.ExprValueUtil
-import org.apache.spark.rdd.RDD
-
-import scala.util.{Failure, Success}
-
-trait StreamingCacheDirectDataConnector extends DirectDataConnector {
-
-  val cacheDataConnector: CacheDataConnector
-  @transient val streamingDataConnector: StreamingDataConnector
-
-  def available(): Boolean = {
-    cacheDataConnector.available && streamingDataConnector.available
-  }
-
-  def init(): Unit = {
-    cacheDataConnector.init
-
-    val ds = streamingDataConnector.stream match {
-      case Success(dstream) => dstream
-      case Failure(ex) => throw ex
-    }
-
-    ds.foreachRDD((rdd, time) => {
-      val ms = time.milliseconds
-
-      val valueMapRdd = transform(rdd, ms)
-
-      // save data frame
-      cacheDataConnector.saveData(valueMapRdd, ms)
-    })
-  }
-
-  protected def transform(rdd: RDD[(streamingDataConnector.K, streamingDataConnector.V)],
-                          ms: Long
-                         ): RDD[Map[String, Any]]
-
-}
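
init() above is the point where the streaming half meets the cache half: every
micro-batch is transformed into value maps and saved under its batch timestamp. A
minimal sketch of that wiring, not part of this patch, with transform and save standing
in for the abstract transform method and the cache connector's saveData:

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.DStream

def wire(ds: DStream[(String, String)],
         transform: (RDD[(String, String)], Long) => RDD[Map[String, Any]],
         save: (RDD[Map[String, Any]], Long) => Unit): Unit = {
  ds.foreachRDD { (rdd, time) =>
    val ms = time.milliseconds      // the batch time keys the cached partition
    save(transform(rdd, ms), ms)
  }
}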

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/streaming/KafkaStreamingDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/streaming/KafkaStreamingDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/streaming/KafkaStreamingDataConnector.scala
deleted file mode 100644
index fdd511d..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/streaming/KafkaStreamingDataConnector.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.streaming
-
-import kafka.serializer.Decoder
-import org.apache.griffin.measure.connector.cache.{CacheDataConnector, DataCacheable}
-import org.apache.griffin.measure.result.{DataInfo, TimeStampInfo}
-import org.apache.griffin.measure.rule.{ExprValueUtil, RuleExprs}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.streaming.StreamingContext
-import org.apache.spark.streaming.dstream.InputDStream
-
-import scala.util.{Failure, Success, Try}
-
-abstract class KafkaStreamingDataConnector(@transient ssc: StreamingContext,
-                                           config: Map[String, Any]
-                                          ) extends StreamingDataConnector {
-  type KD <: Decoder[K]
-  type VD <: Decoder[V]
-
-  val KafkaConfig = "kafka.config"
-  val Topics = "topics"
-
-  val kafkaConfig = config.get(KafkaConfig) match {
-    case Some(map: Map[String, Any]) => map.mapValues(_.toString).map(identity)
-    case _ => Map[String, String]()
-  }
-  val topics = config.getOrElse(Topics, "").toString
-
-  def available(): Boolean = {
-    true
-  }
-
-  def init(): Unit = {}
-
-  def stream(): Try[InputDStream[(K, V)]] = Try {
-    val topicSet = topics.split(",").toSet
-    createDStream(topicSet)
-  }
-
-  protected def createDStream(topicSet: Set[String]): InputDStream[(K, V)]
-}
\ No newline at end of file
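
A side note on the kafka.config handling above: in Scala 2.10/2.11, mapValues returns a
lazy view that is not serializable, so the trailing .map(identity) materialises a plain
Map[String, String] that can safely be shipped to executors. A tiny sketch with
illustrative values:

val raw: Map[String, Any] = Map(
  "group.id" -> "griffin",
  "auto.offset.reset" -> "smallest")
// force a strict, serializable map of string values
val kafkaParams: Map[String, String] = raw.mapValues(_.toString).map(identity)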

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/connector/streaming/StreamingDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/connector/streaming/StreamingDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/connector/streaming/StreamingDataConnector.scala
deleted file mode 100644
index c37caac..0000000
--- a/measure/src/main/scala/org/apache/griffin/measure/connector/streaming/StreamingDataConnector.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.measure.connector.streaming
-
-import org.apache.griffin.measure.connector.DataConnector
-import org.apache.spark.streaming.dstream.InputDStream
-
-import scala.util.Try
-
-
-trait StreamingDataConnector extends DataConnector {
-
-  type K
-  type V
-
-  def stream(): Try[InputDStream[(K, V)]]
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/DataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/DataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/DataConnector.scala
new file mode 100644
index 0000000..534fb1b
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/DataConnector.scala
@@ -0,0 +1,114 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector
+
+import java.util.concurrent.atomic.AtomicLong
+
+import org.apache.griffin.measure.config.params.user.DataConnectorParam
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.process.engine._
+import org.apache.griffin.measure.rule.adaptor.{PreProcPhase, RuleAdaptorGroup, RunPhase}
+import org.apache.griffin.measure.rule.dsl._
+import org.apache.griffin.measure.rule.preproc.PreProcRuleGenerator
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.functions._
+import org.apache.spark.sql.{DataFrame, SQLContext}
+
+
+trait DataConnector extends Loggable with Serializable {
+
+//  def available(): Boolean
+
+  def init(): Unit
+
+  def data(ms: Long): Option[DataFrame]
+
+  val dqEngines: DqEngines
+
+  val dcParam: DataConnectorParam
+
+  val sqlContext: SQLContext
+
+  val id: String = DataConnectorIdGenerator.genId
+
+  protected def suffix(ms: Long): String = s"${id}_${ms}"
+  protected def thisName(ms: Long): String = s"this_${suffix(ms)}"
+
+  final val tmstColName = GroupByColumn.tmst
+
+  def preProcess(dfOpt: Option[DataFrame], ms: Long): Option[DataFrame] = {
+    val thisTable = thisName(ms)
+    val preProcRules = PreProcRuleGenerator.genPreProcRules(dcParam.preProc, suffix(ms))
+    val names = PreProcRuleGenerator.getRuleNames(preProcRules).toSet + thisTable
+
+    try {
+      dfOpt.flatMap { df =>
+        // in data
+        df.registerTempTable(thisTable)
+
+        // generate rule steps
+        val ruleSteps = RuleAdaptorGroup.genConcreteRuleSteps(preProcRules, DslType("spark-sql"), PreProcPhase)
+
+        // run rules
+        dqEngines.runRuleSteps(ruleSteps)
+
+        // out data
+        val outDf = sqlContext.table(thisTable)
+
+        // drop temp table
+        names.foreach { name =>
+          try {
+            sqlContext.dropTempTable(name)
+          } catch {
+            case e: Throwable => warn(s"failed to drop temp table ${name}")
+          }
+        }
+
+        // add tmst
+        val withTmstDf = outDf.withColumn(tmstColName, lit(ms))
+
+        Some(withTmstDf)
+      }
+    } catch {
+      case e: Throwable => {
+        error(s"preprocess of data connector [${id}] error: ${e.getMessage}")
+        None
+      }
+    }
+
+  }
+
+}
+
+object DataConnectorIdGenerator {
+  private val counter: AtomicLong = new AtomicLong(0L)
+  private val head: String = "dc"
+
+  def genId: String = {
+    s"${head}${increment}"
+  }
+
+  private def increment: Long = {
+    counter.incrementAndGet()
+  }
+}
+
+object GroupByColumn {
+  val tmst = "__tmst"
+}
\ No newline at end of file
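
preProcess above registers the incoming frame as a per-connector temp table, runs the
generated pre-proc rule steps against it, drops the temp tables, and finally tags every
row with the batch timestamp. A minimal sketch of that last step, not part of this
patch; __tmst is the GroupByColumn.tmst name defined above, df and ms are whatever
frame and batch time are at hand:

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.lit

// Add a constant __tmst column so later group-by steps can tell batches apart.
def tagWithTimestamp(df: DataFrame, ms: Long): DataFrame =
  df.withColumn("__tmst", lit(ms))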

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/DataConnectorFactory.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/DataConnectorFactory.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/DataConnectorFactory.scala
new file mode 100644
index 0000000..9c3383f
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/DataConnectorFactory.scala
@@ -0,0 +1,150 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector
+
+import kafka.serializer.StringDecoder
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.data.connector.streaming.{KafkaStreamingDataConnector, KafkaStreamingStringDataConnector, StreamingDataConnector}
+import org.apache.griffin.measure.process.engine.{DqEngine, DqEngines}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
+
+import scala.util.Success
+//import org.apache.griffin.measure.data.connector.cache._
+import org.apache.griffin.measure.data.connector.batch._
+//import org.apache.griffin.measure.data.connector.streaming._
+import org.apache.spark.sql.SQLContext
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.streaming.dstream.InputDStream
+import org.apache.spark.streaming.kafka.KafkaUtils
+
+import scala.reflect.ClassTag
+import scala.util.Try
+
+object DataConnectorFactory {
+
+  val HiveRegex = """^(?i)hive$""".r
+  val AvroRegex = """^(?i)avro$""".r
+  val TextDirRegex = """^(?i)text-dir$""".r
+
+  val KafkaRegex = """^(?i)kafka$""".r
+
+  val TextRegex = """^(?i)text$""".r
+
+  def getDataConnector(sqlContext: SQLContext,
+                       @transient ssc: StreamingContext,
+                       dqEngines: DqEngines,
+                       dataConnectorParam: DataConnectorParam
+                      ): Try[DataConnector] = {
+    val conType = dataConnectorParam.conType
+    val version = dataConnectorParam.version
+    val config = dataConnectorParam.config
+    Try {
+      conType match {
+        case HiveRegex() => HiveBatchDataConnector(sqlContext, dqEngines, dataConnectorParam)
+        case AvroRegex() => AvroBatchDataConnector(sqlContext, dqEngines, dataConnectorParam)
+        case TextDirRegex() => TextDirBatchDataConnector(sqlContext, dqEngines, dataConnectorParam)
+        case KafkaRegex() => {
+//          val ksdcTry = getStreamingDataConnector(ssc, dataConnectorParam)
+//          val cdcTry = getCacheDataConnector(sqlContext, dataConnectorParam.cache)
+//          KafkaCacheDirectDataConnector(ksdcTry, cdcTry, dataConnectorParam)
+          getStreamingDataConnector(sqlContext, ssc, dqEngines, dataConnectorParam)
+        }
+        case _ => throw new Exception("connector creation error!")
+      }
+    }
+  }
+
+  private def getStreamingDataConnector(sqlContext: SQLContext,
+                                        @transient ssc: StreamingContext,
+                                        dqEngines: DqEngines,
+                                        dataConnectorParam: DataConnectorParam
+                                       ): StreamingDataConnector = {
+    if (ssc == null) throw new Exception("streaming context is null!")
+    val conType = dataConnectorParam.conType
+    val version = dataConnectorParam.version
+    conType match {
+      case KafkaRegex() => genKafkaDataConnector(sqlContext, ssc, dqEngines, dataConnectorParam)
+      case _ => throw new Exception("streaming connector creation error!")
+    }
+  }
+//
+//  private def getCacheDataConnector(sqlContext: SQLContext,
+//                                    dataCacheParam: DataCacheParam
+//                                   ): Try[CacheDataConnector] = {
+//    if (dataCacheParam == null) {
+//      throw new Exception("invalid data cache param!")
+//    }
+//    val cacheType = dataCacheParam.cacheType
+//    Try {
+//      cacheType match {
+//        case HiveRegex() => HiveCacheDataConnector(sqlContext, dataCacheParam)
+//        case TextRegex() => TextCacheDataConnector(sqlContext, dataCacheParam)
+//        case _ => throw new Exception("cache connector creation error!")
+//      }
+//    }
+//  }
+//
+  private def genKafkaDataConnector(sqlContext: SQLContext,
+                                    @transient ssc: StreamingContext,
+                                    dqEngines: DqEngines,
+                                    dataConnectorParam: DataConnectorParam
+                                   ) = {
+    val config = dataConnectorParam.config
+    val KeyType = "key.type"
+    val ValueType = "value.type"
+    val keyType = config.getOrElse(KeyType, "java.lang.String").toString
+    val valueType = config.getOrElse(ValueType, "java.lang.String").toString
+    (getClassTag(keyType), getClassTag(valueType)) match {
+      case (ClassTag(k: Class[String]), ClassTag(v: Class[String])) => {
+        KafkaStreamingStringDataConnector(sqlContext, ssc, dqEngines, dataConnectorParam)
+      }
+      case _ => {
+        throw new Exception("unsupported key/value type for kafka data connector")
+      }
+    }
+  }
+
+  private def getClassTag(tp: String): ClassTag[_] = {
+    try {
+      val clazz = Class.forName(tp)
+      ClassTag(clazz)
+    } catch {
+      case e: Throwable => throw e
+    }
+  }
+
+  def filterBatchDataConnectors(connectors: Seq[DataConnector]): Seq[BatchDataConnector] = {
+    connectors.flatMap { dc =>
+      dc match {
+        case mdc: BatchDataConnector => Some(mdc)
+        case _ => None
+      }
+    }
+  }
+  def filterStreamingDataConnectors(connectors: Seq[DataConnector]): Seq[StreamingDataConnector] = {
+    connectors.flatMap { dc =>
+      dc match {
+        case mdc: StreamingDataConnector => Some(mdc)
+        case _ => None
+      }
+    }
+  }
+
+}
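
getDataConnector above dispatches on the connector type with anchored, case-insensitive
regexes used as extractors, so "HIVE", "Hive" and "hive" all select the same branch. A
standalone sketch of that idiom, not part of this patch, with placeholder bodies:

object ConnectorTypeDemo {
  val HiveRegex = """^(?i)hive$""".r
  val AvroRegex = """^(?i)avro$""".r

  // A Regex extractor only matches when the whole string matches the pattern.
  def describe(conType: String): String = conType match {
    case HiveRegex() => "hive batch connector"
    case AvroRegex() => "avro batch connector"
    case _           => "unknown connector type"
  }

  def main(args: Array[String]): Unit = {
    println(describe("HIVE"))   // hive batch connector
    println(describe("avro"))   // avro batch connector
  }
}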

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/AvroBatchDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/AvroBatchDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/AvroBatchDataConnector.scala
new file mode 100644
index 0000000..ccd6441
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/AvroBatchDataConnector.scala
@@ -0,0 +1,112 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector.batch
+
+import org.apache.griffin.measure.config.params.user.DataConnectorParam
+import org.apache.griffin.measure.data.connector._
+import org.apache.griffin.measure.process.engine.DqEngines
+import org.apache.griffin.measure.result._
+import org.apache.griffin.measure.utils.HdfsUtil
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.{DataFrame, SQLContext}
+import org.apache.griffin.measure.utils.ParamUtil._
+
+import scala.util.Try
+
+// data connector for avro file
+case class AvroBatchDataConnector(sqlContext: SQLContext, dqEngines: DqEngines, dcParam: DataConnectorParam
+                                 ) extends BatchDataConnector {
+
+  val config = dcParam.config
+
+  val FilePath = "file.path"
+  val FileName = "file.name"
+
+  val filePath = config.getString(FilePath, "")
+  val fileName = config.getString(FileName, "")
+
+  val concreteFileFullPath = if (pathPrefix) s"${filePath}${fileName}" else fileName
+
+  private def pathPrefix(): Boolean = {
+    filePath.nonEmpty
+  }
+
+  private def fileExist(): Boolean = {
+    HdfsUtil.existPath(concreteFileFullPath)
+  }
+
+  def data(ms: Long): Option[DataFrame] = {
+    try {
+      val df = sqlContext.read.format("com.databricks.spark.avro").load(concreteFileFullPath)
+      val dfOpt = Some(df)
+      val preDfOpt = preProcess(dfOpt, ms)
+      preDfOpt
+    } catch {
+      case e: Throwable => {
+        error(s"failed to load avro file ${concreteFileFullPath}")
+        None
+      }
+    }
+  }
+
+//  def available(): Boolean = {
+//    (!concreteFileFullPath.isEmpty) && fileExist
+//  }
+
+//  def init(): Unit = {}
+
+//  def metaData(): Try[Iterable[(String, String)]] = {
+//    Try {
+//      val st = sqlContext.read.format("com.databricks.spark.avro").load(concreteFileFullPath).schema
+//      st.fields.map(f => (f.name, f.dataType.typeName))
+//    }
+//  }
+
+//  def data(): Try[RDD[(Product, (Map[String, Any], Map[String, Any]))]] = {
+//    Try {
+//      loadDataFile.flatMap { row =>
+//        // generate cache data
+//        val cacheExprValueMaps = ExprValueUtil.genExprValueMaps(Some(row), ruleExprs.cacheExprs, constFinalExprValueMap)
+//        val finalExprValueMaps = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMaps)
+//
+//        // data info
+//        val dataInfoMap: Map[String, Any] = DataInfo.cacheInfoList.map { info =>
+//          try {
+//            (info.key -> row.getAs[info.T](info.key))
+//          } catch {
+//            case e: Throwable => info.defWrap
+//          }
+//        }.toMap
+//
+//        finalExprValueMaps.flatMap { finalExprValueMap =>
+//          val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
+//            expr.calculate(finalExprValueMap) match {
+//              case Some(v) => Some(v.asInstanceOf[AnyRef])
+//              case _ => None
+//            }
+//          }
+//          val key = toTuple(groupbyData)
+//
+//          Some((key, (finalExprValueMap, dataInfoMap)))
+//        }
+//      }
+//    }
+//  }
+
+}
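
data() above reads the file through the spark-avro data source and then runs the shared
preProcess step. A minimal sketch of the load itself, assuming the
com.databricks:spark-avro package is on the classpath; the path is only an example:

import org.apache.spark.sql.{DataFrame, SQLContext}

def loadAvro(sqlContext: SQLContext, path: String): DataFrame =
  sqlContext.read.format("com.databricks.spark.avro").load(path)

// loadAvro(sqlContext, "hdfs:///griffin/demo/users.avro").show()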

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/BatchDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/BatchDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/BatchDataConnector.scala
new file mode 100644
index 0000000..4d138ab
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/BatchDataConnector.scala
@@ -0,0 +1,35 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector.batch
+
+import org.apache.griffin.measure.data.connector._
+//import org.apache.griffin.measure.data.connector.cache.DataUpdatable
+import org.apache.spark.sql.DataFrame
+import org.apache.spark.sql.types.StructType
+
+import scala.util.{Failure, Success, Try}
+
+
+trait BatchDataConnector extends DataConnector {
+
+//  def metaData(): Option[StructType]
+
+  def init(): Unit = {}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/HiveBatchDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/HiveBatchDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/HiveBatchDataConnector.scala
new file mode 100644
index 0000000..5d80d0e
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/HiveBatchDataConnector.scala
@@ -0,0 +1,149 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector.batch
+
+import org.apache.griffin.measure.config.params.user.DataConnectorParam
+import org.apache.griffin.measure.data.connector._
+import org.apache.griffin.measure.process.engine.DqEngines
+import org.apache.griffin.measure.result._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.sql.{DataFrame, SQLContext}
+
+import scala.util.{Success, Try}
+import org.apache.griffin.measure.utils.ParamUtil._
+
+// data connector for hive
+case class HiveBatchDataConnector(sqlContext: SQLContext, dqEngines: DqEngines, dcParam: DataConnectorParam
+                                  ) extends BatchDataConnector {
+
+  val config = dcParam.config
+
+  if (!sqlContext.isInstanceOf[HiveContext]) {
+    throw new Exception("hive context not prepared!")
+  }
+
+  val Database = "database"
+  val TableName = "table.name"
+  val Partitions = "partitions"
+
+  val database = config.getString(Database, "default")
+  val tableName = config.getString(TableName, "")
+  val partitionsString = config.getString(Partitions, "")
+
+  val concreteTableName = s"${database}.${tableName}"
+  val partitions = partitionsString.split(";").map(s => s.split(",").map(_.trim))
+
+  def data(ms: Long): Option[DataFrame] = {
+    try {
+      val df = sqlContext.sql(dataSql)
+      val dfOpt = Some(df)
+      val preDfOpt = preProcess(dfOpt, ms)
+      preDfOpt
+    } catch {
+      case e: Throwable => {
+        error(s"failed to load hive table ${concreteTableName}")
+        None
+      }
+    }
+  }
+
+//  def available(): Boolean = {
+//    (!tableName.isEmpty) && {
+//      Try {
+//        if (dbPrefix) {
+//          sqlContext.tables(database).filter(tableExistsSql).collect.size
+//        } else {
+//          sqlContext.tables().filter(tableExistsSql).collect.size
+//        }
+//      } match {
+//        case Success(s) => s > 0
+//        case _ => false
+//      }
+//    }
+//  }
+
+//  def init(): Unit = {}
+
+//  def metaData(): Try[Iterable[(String, String)]] = {
+//    Try {
+//      val originRows = sqlContext.sql(metaDataSql).map(r => (r.getString(0), r.getString(1))).collect
+//      val partitionPos: Int = originRows.indexWhere(pair => pair._1.startsWith("# "))
+//      if (partitionPos < 0) originRows
+//      else originRows.take(partitionPos)
+//    }
+//  }
+
+//  def data(): Try[RDD[(Product, (Map[String, Any], Map[String, Any]))]] = {
+//    Try {
+//      sqlContext.sql(dataSql).flatMap { row =>
+//        // generate cache data
+//        val cacheExprValueMaps = ExprValueUtil.genExprValueMaps(Some(row), ruleExprs.cacheExprs, constFinalExprValueMap)
+//        val finalExprValueMaps = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMaps)
+//
+//        // data info
+//        val dataInfoMap: Map[String, Any] = DataInfo.cacheInfoList.map { info =>
+//          try {
+//            (info.key -> row.getAs[info.T](info.key))
+//          } catch {
+//            case e: Throwable => info.defWrap
+//          }
+//        }.toMap
+//
+//        finalExprValueMaps.flatMap { finalExprValueMap =>
+//          val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
+//            expr.calculate(finalExprValueMap) match {
+//              case Some(v) => Some(v.asInstanceOf[AnyRef])
+//              case _ => None
+//            }
+//          }
+//          val key = toTuple(groupbyData)
+//
+//          Some((key, (finalExprValueMap, dataInfoMap)))
+//        }
+//      }
+//    }
+//  }
+
+  private def tableExistsSql(): String = {
+//    s"SHOW TABLES LIKE '${concreteTableName}'"    // this is hive sql, but it does not work for spark sql
+    s"tableName LIKE '${tableName}'"
+  }
+
+  private def metaDataSql(): String = {
+    s"DESCRIBE ${concreteTableName}"
+  }
+
+  private def dataSql(): String = {
+    val clauses = partitions.map { prtn =>
+      val cls = prtn.mkString(" AND ")
+      if (cls.isEmpty) s"SELECT * FROM ${concreteTableName}"
+      else s"SELECT * FROM ${concreteTableName} WHERE ${cls}"
+    }
+    clauses.mkString(" UNION ALL ")
+  }
+
+//  private def toTuple[A <: AnyRef](as: Seq[A]): Product = {
+//    if (as.size > 0) {
+//      val tupleClass = Class.forName("scala.Tuple" + as.size)
+//      tupleClass.getConstructors.apply(0).newInstance(as: _*).asInstanceOf[Product]
+//    } else None
+//  }
+
+}
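
dataSql above expands the partitions config (';' separates partition groups, ','
separates the predicates inside one group) into one SELECT per group, glued together
with UNION ALL. A standalone sketch, not part of this patch, with illustrative table
and partition values:

object DataSqlDemo {
  def dataSql(table: String, partitionsString: String): String = {
    val partitions = partitionsString.split(";").map(_.split(",").map(_.trim))
    val clauses = partitions.map { prtn =>
      val cls = prtn.mkString(" AND ")
      if (cls.isEmpty) s"SELECT * FROM ${table}"
      else s"SELECT * FROM ${table} WHERE ${cls}"
    }
    clauses.mkString(" UNION ALL ")
  }

  def main(args: Array[String]): Unit = {
    println(dataSql("default.demo_tbl", "dt='20170905',hour='15';dt='20170905',hour='16'"))
    // one statement: SELECT ... WHERE dt='20170905' AND hour='15'
    //                UNION ALL SELECT ... WHERE dt='20170905' AND hour='16'
  }
}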



[03/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/test/resources/output.msg
----------------------------------------------------------------------
diff --git a/measure/src/test/resources/output.msg b/measure/src/test/resources/output.msg
new file mode 100644
index 0000000..4ec9288
--- /dev/null
+++ b/measure/src/test/resources/output.msg
@@ -0,0 +1 @@
+{"groups":[{"_name":"DefaultGroup","groupType":"FULLSITE","storeType":"CATALOG","attrsList":[{"name":"CENTERCOL","dataType":"STRING","values":["\u003cdiv id\u003d\"centerCol\" class\u003d\"centerColumn\"\u003e  \u003cdiv id\u003d\"booksTitle\" class\u003d\"feature\" data-feature-name\u003d\"booksTitle\"\u003e   \u003cdiv class\u003d\"a-section a-spacing-none\"\u003e    \u003ch1 id\u003d\"title\" class\u003d\"a-size-large a-spacing-none\"\u003e \u003cspan id\u003d\"productTitle\" class\u003d\"a-size-large\"\u003eBefore the First Day\u003c/span\u003e \u003cspan class\u003d\"a-size-medium a-color-secondary a-text-normal\"\u003ePaperback\u003c/span\u003e     \u003c!--  use pre formatted date that complies with legal requirement from media matrix --\u003e \u003cspan class\u003d\"a-size-medium a-color-secondary a-text-normal\"\u003e– July 22, 2013\u003c/span\u003e \u003c/h1\u003e   \u003c/div\u003e   \u003cdiv id\u003d\"byline\" class\u003d\"a-section a-spacing-micro bylineHidden featur
 e\"\u003e    by    \u003cspan class\u003d\"author notFaded\" data-width\u003d\"\"\u003e \u003ca class\u003d\"a-link-normal\" href\u003d\"/s/ref\u003ddp_byline_sr_book_1?ie\u003dUTF8\u0026amp;text\u003dStephen+M.+Hale\u0026amp;search-alias\u003dbooks\u0026amp;field-author\u003dStephen+M.+Hale\u0026amp;sort\u003drelevancerank\"\u003eStephen M. Hale\u003c/a\u003e \u003cspan class\u003d\"contribution\" spacing\u003d\"none\"\u003e \u003cspan class\u003d\"a-color-secondary\"\u003e(Author)\u003c/span\u003e \u003c/span\u003e \u003c/span\u003e   \u003c/div\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"averageCustomerReviews_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"averageCustomerReviews\"\u003e   \u003cstyle type\u003d\"text/css\"\u003e    /*     * Fix for UDP-1061. Average customer reviews has a small extra line on hover     * https://omni-grok.amazon.com/xref/src/appgroup/websiteTemplates/retail/SoftlinesDetailPageAssets/udp-intl-lock/src/legacy.css?indexName\u003dWebsi
 teTemplates#40    */    .noUnderline a:hover {         text-decoration: none;     }\u003c/style\u003e   \u003cdiv id\u003d\"averageCustomerReviews\" class\u003d\"a-spacing-none\" data-asin\u003d\"1628391340\" data-ref\u003d\"dpx_acr_pop_\"\u003e    \u003cspan class\u003d\"a-declarative\" data-action\u003d\"acrStarsLink-click-metrics\" data-acrstarslink-click-metrics\u003d\"{}\"\u003e \u003cspan id\u003d\"acrPopover\" class\u003d\"reviewCountTextLinkedHistogram noUnderline\" title\u003d\"5.0 out of 5 stars\"\u003e \u003cspan class\u003d\"a-declarative\" data-action\u003d\"a-popover\" data-a-popover\u003d\"{\u0026quot;max-width\u0026quot;:\u0026quot;700\u0026quot;,\u0026quot;closeButton\u0026quot;:\u0026quot;false\u0026quot;,\u0026quot;position\u0026quot;:\u0026quot;triggerBottom\u0026quot;,\u0026quot;url\u0026quot;:\u0026quot;/gp/customer-reviews/widgets/average-customer-review/popover/ref\u003ddpx_acr_pop_?contextId\u003ddpx\u0026amp;asin\u003d1628391340\u0026quot;}\"\u003e \u003ca 
 href\u003d\"javascript:void(0)\" class\u003d\"a-popover-trigger a-declarative\"\u003e \u003ci class\u003d\"a-icon a-icon-star a-star-5\"\u003e\u003cspan class\u003d\"a-icon-alt\"\u003e5.0 out of 5 stars\u003c/span\u003e\u003c/i\u003e \u003ci class\u003d\"a-icon a-icon-popover\"\u003e\u003c/i\u003e\u003c/a\u003e \u003c/span\u003e \u003cspan class\u003d\"a-letter-space\"\u003e\u003c/span\u003e \u003c/span\u003e \u003c/span\u003e    \u003cspan class\u003d\"a-letter-space\"\u003e\u003c/span\u003e    \u003cspan class\u003d\"a-declarative\" data-action\u003d\"acrLink-click-metrics\" data-acrlink-click-metrics\u003d\"{}\"\u003e \u003ca id\u003d\"acrCustomerReviewLink\" class\u003d\"a-link-normal\" href\u003d\"#customerReviews\"\u003e \u003cspan id\u003d\"acrCustomerReviewText\" class\u003d\"a-size-base\"\u003e1 customer review\u003c/span\u003e \u003c/a\u003e \u003c/span\u003e    \u003cscript type\u003d\"text/javascript\"\u003e                    P.when(\u0027A\u0027, \u0027ready\u0027).exe
 cute(function(A) {                        A.declarative(\u0027acrLink-click-metrics\u0027, \u0027click\u0027, { \"allowLinkDefault\" : true }, function(event){                            if(window.ue) {                                ue.count(\"acrLinkClickCount\", (ue.count(\"acrLinkClickCount\") || 0) + 1);                            }                        });                    });                \u003c/script\u003e    \u003cscript type\u003d\"text/javascript\"\u003e            P.when(\u0027A\u0027, \u0027cf\u0027).execute(function(A) {                A.declarative(\u0027acrStarsLink-click-metrics\u0027, \u0027click\u0027, { \"allowLinkDefault\" : true },  function(event){                    if(window.ue) {                        ue.count(\"acrStarsLinkWithPopoverClickCount\", (ue.count(\"acrStarsLinkWithPopoverClickCount\") || 0) + 1);                    }                });            });        \u003c/script\u003e   \u003c/div\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"ser
 iesTitle_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"seriesTitle\"\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"zeitgeistBadge_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"zeitgeistBadge\"\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"socialFabric_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"socialFabric\"\u003e  \u003c/div\u003e  \u003chr /\u003e  \u003cdiv id\u003d\"adoptedData\" class\u003d\"feature\" data-feature-name\u003d\"adoptedData\"\u003e   \u003cstyle\u003e    #adoptedDataContainer .campusLogo {        height:20px;        width:20px;        margin-right:1px;    }    #adoptedDataContainer .campusName {        line-height:20px;    }    #adoptedDataContainer.paddingMicro {        padding:4px !important;    }    #adoptedDataContainer .paddingLeftMicro {        padding-left:4px !important;    }    #adoptedDataContainer .paddingLeftSmall {        padding-left:10px !important;    }    #adoptedDataContainer .paddingRightSmall {  
       padding-right:10px !important;    }    #adoptedDataContainer .courseListExpanderContainer a:focus {        outline:none;    }    #adoptedDataContainer .courseColumn {        width:50%;        float:left;    }    #adoptedDataContainer .textRow {        display:inline-block;        width:100%;    }\u003c/style\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"MediaMatrix\" class\u003d\"feature\" data-feature-name\u003d\"MediaMatrix\"\u003e   \u003cdiv id\u003d\"formats\" class\u003d\"a-section a-spacing-large responsive\"\u003e   \u003ca class\u003d\"a-link-expander a-spacing-top-micro a-spacing-small a-size-small\"\u003e \u003ci id\u003d\"formatsIcon\" class\u003d\"a-icon a-icon-expand\"\u003e\u003c/i\u003e \u003cspan id\u003d\"showMoreFormatsPrompt\" class\u003d\"tmmShowPrompt\"\u003eSee all 3 formats and editions\u003c/span\u003e \u003cspan id\u003d\"hideMoreFormatsPrompt\" class\u003d\"tmmHidePrompt\"\u003eHide other formats and editions\u003c/span\u003e \u003c/a\u003e    \u003cd
 iv id\u003d\"twister\" class\u003d\"a-section a-spacing-base a-spacing-top-small a-size-mini\"\u003e     \u003c!--     This file is just a replica of table-aiv present in mediaMatrixtemplate package.    This has just been picked up and moved and there has not been addition/modification/removal    of any logic here . --\u003e     \u003ctable class\u003d\"a-normal a-spacing-none title\"\u003e      \u003ctbody\u003e      \u003ctr\u003e        \u003ctd class\u003d\"dp-title-col\"\u003e \u003c/td\u003e        \u003ctd class\u003d\"a-color-secondary a-text-right dp-price-col\"\u003e         \u003cdiv class\u003d\"a-fixed-right-grid\"\u003e         \u003cdiv class\u003d\"a-fixed-right-grid-inner\" style\u003d\"padding-right:50px\"\u003e           \u003cdiv class\u003d\"a-fixed-right-grid-col a-col-left\" style\u003d\"padding-right:2.5%;*width:97.1%;float:left;\"\u003e            Price           \u003c/div\u003e           \u003cdiv class\u003d\"a-fixed-right-grid-col a-col-right\" style\u00
 3d\"width:50px;margin-right:-50px;float:left;\"\u003e\u003c/div\u003e          \u003c/div\u003e        \u003c/div\u003e \u003c/td\u003e        \u003ctd class\u003d\"a-color-secondary a-text-right dp-new-col\"\u003e New from \u003c/td\u003e        \u003ctd class\u003d\"a-color-secondary a-text-right dp-used-col\"\u003e Used from \u003c/td\u003e       \u003c/tr\u003e      \u003c/tbody\u003e    \u003c/table\u003e     \u003cdiv class\u003d\"hr unselected-row\"\u003e\u003c/div\u003e     \u003cdiv class\u003d\"top-level unselected-row\"\u003e      \u003cspan class\u003d\"a-declarative\" data-action\u003d\"tmm-see-more-editions-click\" data-tmm-see-more-editions-click\u003d\"{\u0026quot;metabindingPlaceHolder\u0026quot;:1,\u0026quot;metabindingPlaceHolderState\u0026quot;:\u0026quot;0\u0026quot;,\u0026quot;metabindingUrl\u0026quot;:\u0026quot;/Before-First-Day-Stephen-Hale-ebook/dp/B00E4WYJT8/ref\u003dtmm_kin_title_0?_encoding\u003dUTF8\u0026amp;amp;qid\u003d\u0026amp;amp;sr\u003d\u0026quot
 ;}\" id\u003d\"declarative_1\"\u003e       \u003ctable class\u003d\"a-normal a-spacing-none\"\u003e        \u003ctbody\u003e        \u003ctr\u003e          \u003ctd class\u003d\"dp-title-col\"\u003e           \u003c!-- Do not show expander when DVD_redesign weblab is in treatment --\u003e \u003ca class\u003d\"empty-expander title-expander\" style\u003d\"vertical-align:middle;\"\u003e \u003cspan class\u003d\"metabinding-expander\"\u003e\u003c/span\u003e \u003c/a\u003e \u003ca href\u003d\"/Before-First-Day-Stephen-Hale-ebook/dp/B00E4WYJT8/ref\u003dtmm_kin_title_0?_encoding\u003dUTF8\u0026amp;qid\u003d\u0026amp;sr\u003d\" class\u003d\"title-text\"\u003e \u003cspan class\u003d\"a-size-small a-color-base\"\u003eKindle\u003c/span\u003e \u003cspan id\u003d\"tmmSpinnerDiv_1\" style\u003d\"display: none\" class\u003d\"tmmAjaxLoading\"\u003e\u003c/span\u003e \u003c/a\u003e \u003cspan id\u003d\"tmmErrorDiv_1\" class\u003d\"tmmErrorClass displayNone\"\u003e            \u003cdiv class\u003d\"a-b
 ox a-alert-inline a-alert-inline-error\"\u003e            \u003cdiv class\u003d\"a-box-inner a-alert-container\"\u003e             \u003ci class\u003d\"a-icon a-icon-alert\"\u003e\u003c/i\u003e             \u003cdiv class\u003d\"a-alert-content\"\u003e               \u003cspan\u003e\u0026quot;Please retry\u0026quot;\u003c/span\u003e              \u003c/div\u003e            \u003c/div\u003e           \u003c/div\u003e \u003c/span\u003e \u003c/td\u003e          \u003ctd class\u003d\"a-text-right dp-price-col\"\u003e           \u003cdiv class\u003d\"a-fixed-right-grid\"\u003e           \u003cdiv class\u003d\"a-fixed-right-grid-inner\" style\u003d\"padding-right:50px\"\u003e             \u003cdiv class\u003d\"a-fixed-right-grid-col a-col-left\" style\u003d\"padding-right:2.5%;*width:97.1%;float:left;\"\u003e              \u003ca class\u003d\"a-link-normal\" href\u003d\"/Before-First-Day-Stephen-Hale-ebook/dp/B00E4WYJT8/ref\u003dtmm_kin_title_0?_encoding\u003dUTF8\u0026amp;qid\u003d\u0026
 amp;sr\u003d\"\u003e \u003cspan class\u003d\"a-size-small a-color-price\"\u003e$7.99\u003c/span\u003e \u003c/a\u003e             \u003c/div\u003e             \u003cdiv class\u003d\"a-fixed-right-grid-col a-col-right\" style\u003d\"width:50px;margin-right:-50px;float:left;\"\u003e             \u003c/div\u003e            \u003c/div\u003e          \u003c/div\u003e \u003c/td\u003e          \u003ctd class\u003d\"a-color-tertiary a-text-right dp-new-col\"\u003e — \u003c/td\u003e          \u003ctd class\u003d\"a-color-tertiary a-text-right dp-used-col\"\u003e — \u003c/td\u003e         \u003c/tr\u003e        \u003c/tbody\u003e      \u003c/table\u003e \u003c/span\u003e     \u003c/div\u003e     \u003cdiv id\u003d\"metabinding_row_top_1\" class\u003d\"second-level\"\u003e\u003c/div\u003e     \u003cdiv id\u003d\"metabinding_row_bottom_1\" class\u003d\"second-level\"\u003e\u003c/div\u003e     \u003cdiv class\u003d\"hr selected-row\"\u003e\u003c/div\u003e     \u003cdiv class\u003d\"top-level 
 selected-row\"\u003e      \u003cspan class\u003d\"a-declarative\" data-action\u003d\"tmm-see-more-editions-click\" data-tmm-see-more-editions-click\u003d\"{\u0026quot;metabindingPlaceHolder\u0026quot;:2,\u0026quot;metabindingPlaceHolderState\u0026quot;:\u0026quot;0\u0026quot;,\u0026quot;metabindingUrl\u0026quot;:\u0026quot;#\u0026quot;}\" id\u003d\"declarative_2\"\u003e       \u003ctable class\u003d\"a-normal a-spacing-none\"\u003e        \u003ctbody\u003e        \u003ctr\u003e          \u003ctd class\u003d\"dp-title-col\"\u003e           \u003c!-- Do not show expander when DVD_redesign weblab is in treatment --\u003e \u003ca class\u003d\"a-link-expander title-expander\" style\u003d\"vertical-align:middle; padding-left:16px\"\u003e \u003cspan class\u003d\"a-link-expander metabinding-expander\"\u003e \u003ci id\u003d\"editionsIcon_2\" class\u003d\"a-icon a-icon-expand tmm-row-icon\"\u003e\u003c/i\u003e \u003c/span\u003e \u003c/a\u003e \u003cspan class\u003d\"title-text\"\u003e \u003c
 span class\u003d\"a-size-small a-color-base\"\u003ePaperback\u003c/span\u003e \u003cspan id\u003d\"tmmSpinnerDiv_2\" style\u003d\"display: none\" class\u003d\"tmmAjaxLoading\"\u003e\u003c/span\u003e \u003c/span\u003e \u003cspan id\u003d\"tmmErrorDiv_2\" class\u003d\"tmmErrorClass displayNone\"\u003e            \u003cdiv class\u003d\"a-box a-alert-inline a-alert-inline-error\"\u003e            \u003cdiv class\u003d\"a-box-inner a-alert-container\"\u003e             \u003ci class\u003d\"a-icon a-icon-alert\"\u003e\u003c/i\u003e             \u003cdiv class\u003d\"a-alert-content\"\u003e               \u003cspan\u003e\u0026quot;Please retry\u0026quot;\u003c/span\u003e              \u003c/div\u003e            \u003c/div\u003e           \u003c/div\u003e \u003c/span\u003e \u003c/td\u003e          \u003ctd class\u003d\"a-text-right dp-price-col\"\u003e           \u003cdiv class\u003d\"a-fixed-right-grid\"\u003e           \u003cdiv class\u003d\"a-fixed-right-grid-inner\" style\u003d\"padding
 -right:50px\"\u003e             \u003cdiv class\u003d\"a-fixed-right-grid-col a-col-left\" style\u003d\"padding-right:2.5%;*width:97.1%;float:left;\"\u003e              \u003cspan class\u003d\"a-size-small a-color-price\"\u003e$15.49\u003c/span\u003e             \u003c/div\u003e             \u003cdiv class\u003d\"a-fixed-right-grid-col a-col-right\" style\u003d\"width:50px;margin-right:-50px;float:left;\"\u003e             \u003c/div\u003e            \u003c/div\u003e          \u003c/div\u003e \u003c/td\u003e          \u003ctd class\u003d\"a-text-right dp-new-col\"\u003e \u003ca class\u003d\"a-link-normal\" href\u003d\"/gp/offer-listing/1628391340/ref\u003dtmm_pap_new_olp_sr?ie\u003dUTF8\u0026amp;condition\u003dnew\u0026amp;qid\u003d\u0026amp;sr\u003d\"\u003e \u003cspan\u003e$9.44\u003c/span\u003e \u003c/a\u003e \u003c/td\u003e          \u003ctd class\u003d\"a-text-right dp-used-col\"\u003e \u003ca class\u003d\"a-link-normal\" href\u003d\"/gp/offer-listing/1628391340/ref\u003dtmm_pap
 _used_olp_sr?ie\u003dUTF8\u0026amp;condition\u003dused\u0026amp;qid\u003d\u0026amp;sr\u003d\"\u003e \u003cspan\u003e$12.17\u003c/span\u003e \u003c/a\u003e \u003c/td\u003e         \u003c/tr\u003e        \u003c/tbody\u003e      \u003c/table\u003e \u003c/span\u003e     \u003c/div\u003e     \u003cdiv id\u003d\"metabinding_row_top_2\" class\u003d\"second-level\"\u003e\u003c/div\u003e     \u003cdiv id\u003d\"metabinding_row_bottom_2\" class\u003d\"second-level\"\u003e\u003c/div\u003e     \u003cscript type\u003d\"a-state\" data-a-state\u003d\"{\u0026quot;key\u0026quot;:\u0026quot;mediamatrix-state\u0026quot;}\"\u003e{\"url_2\":\"/gp/media-matrix/fetch-DPX-expansion-data.html/ref\u003dtmm_pap_metabinding_expansion_sr?ie\u003dUTF8\u0026amp;tagActionCode\u003d\u0026amp;bindingCount\u003d2\u0026amp;websiteDisplayGroup\u003dbook_display_on_website\u0026amp;cor\u003dUS\u0026amp;storeID\u003d\u0026amp;qid\u003d\u0026amp;productTypeDefinition\u003dABIS_BOOK\u0026amp;viewID\u003dglance\u0026amp;lan
 dingAsin\u003d1628391340\u0026amp;customerID\u003d\u0026amp;tasParentAsin\u003dB00E5D53IM\u0026amp;asin\u003d1628391340\u0026amp;metabinding\u003dpaperback_meta_binding\u0026amp;metabindingIndex\u003d2\u0026amp;isPrime\u003d0\u0026amp;nodeID\u003d\u0026amp;sr\u003d\",\"isAjaxInProgress_1\":\"0\",\"url_1\":\"/gp/media-matrix/fetch-DPX-expansion-data.html/ref\u003dtmm_kin_metabinding_expansion_0?ie\u003dUTF8\u0026amp;tagActionCode\u003d\u0026amp;bindingCount\u003d1\u0026amp;websiteDisplayGroup\u003dbook_display_on_website\u0026amp;cor\u003dUS\u0026amp;storeID\u003d\u0026amp;qid\u003d\u0026amp;productTypeDefinition\u003dABIS_BOOK\u0026amp;viewID\u003dglance\u0026amp;landingAsin\u003d1628391340\u0026amp;customerID\u003d\u0026amp;tasParentAsin\u003dB00E5D53IM\u0026amp;asin\u003dB00E4WYJT8\u0026amp;metabinding\u003dkindle_meta_binding\u0026amp;metabindingIndex\u003d1\u0026amp;isPrime\u003d0\u0026amp;nodeID\u003d\u0026amp;sr\u003d\",\"isAjaxInProgress_2\":\"0\",\"isAjaxComplete_1\":\"0\",\
 "isAjaxComplete_2\":\"0\"}\u003c/script\u003e    \u003c/div\u003e    \u003cdiv id\u003d\"tmmSwatches\" class\u003d\"a-row nonJSFormats\"\u003e     \u003cul class\u003d\"a-nostyle a-button-list a-horizontal\"\u003e      \u003cli class\u003d\"swatchElement unselected\"\u003e\u003cspan class\u003d\"a-list-item\"\u003e \u003cspan class\u003d\"a-button a-spacing-mini a-button-toggle format\"\u003e\u003cspan class\u003d\"a-button-inner\"\u003e\u003ca href\u003d\"/Before-First-Day-Stephen-Hale-ebook/dp/B00E4WYJT8/ref\u003dtmm_kin_swatch_0?_encoding\u003dUTF8\u0026amp;qid\u003d\u0026amp;sr\u003d\" class\u003d\"a-button-text\" role\u003d\"button\"\u003e \u003cspan\u003eKindle\u003c/span\u003e \u003cbr /\u003e \u003cspan class\u003d\"a-color-secondary\"\u003e \u003cspan\u003e $7.99 \u003c/span\u003e \u003c/span\u003e \u003c/a\u003e\u003c/span\u003e\u003c/span\u003e \u003cspan class\u003d\"tmm-olp-links\"\u003e \u003c/span\u003e \u003cspan class\u003d\"a-size-mini a-color-secondary tmm-olp-lin
 ks\"\u003e \u003ca id\u003d\"kcpAppsPopOver\" class\u003d\"a-size-mini a-link-normal kcpAppsPopOver\" href\u003d\"javascript:void(0);\"\u003e \u003cspan class\u003d\"kcpAppBaseBox_\"\u003e \u003cspan class\u003d\"kcpAppsPopOver\"\u003e Read with Our \u003cspan class\u003d\"a-color-price a-text-bold\"\u003e Free App \u003c/span\u003e \u003c/span\u003e \u003c/span\u003e \u003c/a\u003e \u003c/span\u003e \u003cspan class\u003d\"tmm-olp-links\"\u003e \u003c/span\u003e \u003c/span\u003e\u003c/li\u003e      \u003cli class\u003d\"swatchElement selected\"\u003e\u003cspan class\u003d\"a-list-item\"\u003e \u003cspan class\u003d\"a-button a-button-selected a-spacing-mini a-button-toggle format\"\u003e\u003cspan class\u003d\"a-button-inner\"\u003e\u003ca href\u003d\"javascript:void(0)\" class\u003d\"a-button-text\" role\u003d\"button\"\u003e \u003cspan\u003ePaperback\u003c/span\u003e \u003cbr /\u003e \u003cspan class\u003d\"a-color-base\"\u003e \u003cspan class\u003d\"a-color-price\"\u003e $15.4
 9 \u003c/span\u003e \u003c/span\u003e \u003c/a\u003e\u003c/span\u003e\u003c/span\u003e \u003cspan class\u003d\"tmm-olp-links\"\u003e \u003c/span\u003e \u003cspan class\u003d\"tmm-olp-links\"\u003e \u003cspan class\u003d\"olp-used olp-link\"\u003e \u003ca class\u003d\"a-size-mini a-link-normal\" href\u003d\"/gp/offer-listing/1628391340/ref\u003dtmm_pap_used_olp_sr?ie\u003dUTF8\u0026amp;condition\u003dused\u0026amp;qid\u003d\u0026amp;sr\u003d\"\u003e 2 Used \u003cspan class\u003d\"olp-from\"\u003efrom\u003c/span\u003e $12.17 \u003c/a\u003e \u003c/span\u003e \u003cspan class\u003d\"olp-new olp-link\"\u003e \u003ca class\u003d\"a-size-mini a-link-normal\" href\u003d\"/gp/offer-listing/1628391340/ref\u003dtmm_pap_new_olp_sr?ie\u003dUTF8\u0026amp;condition\u003dnew\u0026amp;qid\u003d\u0026amp;sr\u003d\"\u003e 13 New \u003cspan class\u003d\"olp-from\"\u003efrom\u003c/span\u003e $9.44 \u003c/a\u003e \u003c/span\u003e \u003c/span\u003e \u003c/span\u003e\u003c/li\u003e     \u003c/ul\u003e    
 \u003c/div\u003e  \u003c/div\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"applicablePromotionList_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"applicablePromotionList\"\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"holidayDeliveryMessage_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"holidayDeliveryMessage\"\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"campusInfo\" class\u003d\"feature\" data-feature-name\u003d\"campusInfo\"\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"extraProductInfoFeatureGroup\" class\u003d\"feature\" data-feature-name\u003d\"extraProductInfoFeatureGroup\"\u003e   \u003cdiv id\u003d\"newerVersion_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"newerVersion\"\u003e   \u003c/div\u003e   \u003cdiv id\u003d\"productAlert_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"productAlert\"\u003e   \u003c/div\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"dynamicIframe_feature_div\" class\u003d\"feature\" data-feature
 -name\u003d\"dynamicIframe\"\u003e   \u003cscript id\u003d\"dynamic_iframe_CSS\" type\u003d\"text/undefined\"\u003ehtml {    font-size: 100%;}a:focus {    outline: thin dotted #333333;    outline-offset: -2px;}a:hover, a:active {    outline: 0 none;}body {     background-color: #FFFFFF;    margin:0;        /* from AUI */    color: #333333;    font-family: Arial,sans-serif;    font-size: 13px;    line-height: 19px;}h1, h2, h3, h4, h5, h6 {    margin: 0;    padding: 0;}h1, h2, h3, h4 {    font-family: Arial,sans-serif;    text-rendering: optimizelegibility;    padding-bottom: 4px;}h1:last-child, h2:last-child, h3:last-child, h4:last-child {    padding-bottom: 0;}h1 {    font-size: 28px;    font-weight: normal;    line-height: 1.2;}h2 {    font-size: 21px;    font-weight: normal;    line-height: 1.3;}h3 {    font-size: 17px;    font-weight: normal;    line-height: 1.24;}p {  margin: 0 0 14px;  padding: 0;}p:last-child {    margin-bottom: 0;}p + p {    margin-top: -4px;}b, strong {    f
 ont-weight: bold;}i, em {    font-style: italic;}blockquote {    margin: 13px;}small {    font-size: 12px;}img {    border: 0 none;    max-width: 100%;    vertical-align: top;}td {    float: none;    margin-right: 0;}td, th {    padding: 3px;    vertical-align: top;}td:first-child, th:first-child {    padding-left: 0;}td:last-child, th:last-child {    padding-right: 0;}th {    text-align: left;}tr:last-child td, tr:last-child th {    padding-bottom: 0;}tr:first-child td, tr:first-child th {    padding-top: 0;}a, a:visited, a:active, a:link {    color: #007EB9;    text-decoration: none;}a:hover {    color: #E47911;    cursor: pointer;    text-decoration: underline;}p a {    text-decoration: underline;}ul {    /*color: #AAAAAA; Overriding this style as the color is out of sync with the rest of book description*/    list-style-type: disc;}ol {    color: #888888;}ul, ol {    margin: 0 0 18px 22px;    padding: 0;}ul li, ol li {    margin: 0;    word-wrap: break-word;}ul:last-child, ol:la
 st-child {    margin-bottom: 0 !important;}#iframeContent {overflow: hidden;}h2.productDescriptionHeader {    margin-bottom: 0em;}.emptyClear {    clear:left;    height:0px;    font-size:0px;}div.productDescriptionWrapper {    margin: 0 0 1em 0;}h3.productDescriptionSource {    font-weight:normal;    color:#333333;    font-size:1.23em;    margin: .75em 0 .375em 0;    clear:left;}.seeAll {      margin-top: 1.25em;      margin-left: -15px;}#technicalProductFeatures ul {  list-style-type: disc;  margin: 1.12em 0;  margin-left: 20px;}#iframeContent ul {  list-style-type: disc;  margin-left: 20px;}ul li {  margin: 0 0 0 20px;}ul li ul {  list-style-type: disc;  margin-left: 20px;}ul li ul li {  margin: 0 0 0 20px;}.aplus h4, .aplus h5 {    margin: 0 0 .75em 0;    font-size: 1em;}.aplus h4 {    color: #CC6600;}.aplus p {    margin: 0 0 1em 0;}.aplus .break {    clear:both;    height:0px;    font-size:0px;}.aplus .spacer {    margin-bottom: 13px;}.aplus img {    border:none;}.aplus .leftIm
 age, .aplus .rightImage, .aplus .centerImage {    margin-bottom: 1em;    margin-top: 0;    text-align:center;    vertical-align:top;}.aplus .leftImage {    margin-right: 15px;    float:left;    clear:left;}.aplus .rightImage {    margin-left: 15px;    float:right;    clear:right;}.aplus .imageCaption {    clear:both;    padding: .5em .5em 0 .5em;    font-size: .846em;    display: block;}.aplus table.data {      border-collapse: collapse;      margin-bottom: 1.25em;}.aplus table.data th {      font-weight: bold;      background: #F7F7F7;      border-style:solid;      border-color: #CCCCCC;      border-width:0 0 1px 1px;}.aplus table.data td {      border-left: 1px solid #CCC;      border-bottom: 1px dotted #CCC}.aplus table.data th, .aplus table.data td{      padding:3px 10px;      text-align:left}.aplus table.data tfoot {      font-style: italic;}.aplus table.data caption {      background: #eee;      font-size: .8125em;}.aplus table.data tr td:first-child, .aplus table.data tr th:f
 irst-child {      border-left-width:0px;}.aplus ul {      margin:0 0 1em 0;}.aplus .center {      text-align: center;}.aplus .right {      text-align: right;}.aplus  .sixth-col,.aplus .fourth-col,.aplus .third-col,.aplus .half-col,.aplus .two-third-col,.aplus .three-fourth-col,.aplus .one-col {    float:left;    margin-right: 1.6760%;    overflow: hidden;}.aplus .last {    margin-right:0px;}.aplus .sixth-col {    width: 15.080%;}.aplus .fourth-col {    width: 23.4637%;}.aplus .third-col {    width: 31.8436%;}.aplus .half-col {    width: 48.6034%;}.aplus .two-third-col {    width: 65.3631%;}.aplus .three-fourth-col {    width: 73.7430%;}.aplus .one-col {    width: 98.8827%;    margin-right:0;}.aplus .last {    margin-right:0;}.aplus {    width: 100%;    min-width: 895px;}\u003c/script\u003e   \u003cscript type\u003d\"text/javascript\"\u003eP.register(\"DynamicIframe\", function(){function DynamicIframe(options) {    var nTries \u003d 0,      MAX_TRIES \u003d 20,      iframeId \u003d 
 options.iframeId,      encodedIframeContent \u003d options.encodedIframeContent,      iframeWrapperId \u003d options.iframeWrapperId,      initialResizeCallback \u003d options.initialResizeCallback,      iframeCSSId \u003d \"dynamic_iframe_CSS\";      iframeOverriddenCSSId \u003d options.overriddenCSSId;      this.createIframe \u003d function() {    var iframe \u003d document.createElement(\u0027iframe\u0027);    iframe.id \u003d iframeId;    iframe.className \u003d \"ap_never_hide\";    iframe.width \u003d \"100%\";    iframe.scrolling \u003d \"no\";    iframe.frameBorder \u003d \"0\";    onloadFn \u003d this.iframeload;        if ( iframe.addEventListener )      iframe.addEventListener(\"load\", onloadFn, false);    else if ( iframe.attachEvent )      iframe.attachEvent(\"onload\", onloadFn);    else iframe.onload \u003d onloadFn;        var wrapper \u003d document.getElementById(iframeWrapperId);    wrapper.innerHTML \u003d \u0027\u0027;    iframe \u003d wrapper.appendChild(ifram
 e);        var overriddenCSS \u003d \"\";    if (document.getElementById(iframeOverriddenCSSId)) {    \toverriddenCSS \u003d document.getElementById(iframeOverriddenCSSId).innerHTML;    }        var doc \u003d ((iframe.contentWindow \u0026\u0026 iframe.contentWindow.document) || iframe.contentDocument);    if (doc \u0026\u0026 doc.open) {      doc.open();      doc.writeln(\u0027\u003chtml\u003e\u003chead\u003e\u003cbase target\u003d\"_top\" /\u003e\u003c/\u0027 + \u0027head\u003e\u003cbody\u003e\u003cstyle class\u003d\"text/css\"\u003e\u0027 + document.getElementById(iframeCSSId).innerHTML + overriddenCSS + \u0027\u003c/style\u003e\u003cdiv id\u003d\"iframeContent\"\u003e\u0027 +               \u0027\u0027 + decodeURIComponent(encodedIframeContent) + \u0027\u003c/\u0027+\u0027div\u003e\u0027+\u0027\u003c/\u0027+\u0027body\u003e\u003c/html\u003e\u0027);      doc.close();    }  }    this.iframeload \u003d function () {    var iframe \u003d document.getElementById(iframeId);    iframe.
 style.display \u003d \u0027\u0027;    setTimeout(function () {      setIframeHeight(initialResizeCallback);    }, 20);   }    function getDocHeight(doc) {    var contentDiv \u003d doc.getElementById(\"iframeContent\");    var docHeight \u003d 0;    if(contentDiv){      docHeight \u003d Math.max(        contentDiv.scrollHeight,        contentDiv.offsetHeight,        contentDiv.clientHeight      );    }    return docHeight;  }    function setIframeHeight(resizeCallback) {    var iframeDoc, iframe \u003d document.getElementById(iframeId);    iframeDoc \u003d ((iframe.contentWindow \u0026\u0026 iframe.contentWindow.document) || iframe.contentDocument);    if (iframeDoc) {      var h \u003d getDocHeight(iframeDoc);      if (h \u0026\u0026 h !\u003d 0) {        iframe.style.height \u003d parseInt(h) + \u0027px\u0027;        if(typeof resizeCallback \u003d\u003d \"function\") {          resizeCallback(iframeId);\t        }      } else if (nTries \u003c MAX_TRIES) {        nTries++;        
 setTimeout(function () {            setIframeHeight(resizeCallback);        }, 50);      }    }  }    this.resizeIframe \u003d function(resizeCallback) {    nTries \u003d 0;\tsetIframeHeight(resizeCallback);  }}return DynamicIframe;});\u003c/script\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"bookDescription_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"bookDescription\"\u003e   \u003cscript id\u003d\"bookDesc_override_CSS\" type\u003d\"text/undefined\"\u003ebody {     font-size: 14px;    line-height: 1.6em;}.aplus {\tmin-width: inherit;}\u003c/script\u003e   \u003cnoscript\u003e    \u003cdiv\u003e     This is book one of a fantasy series for youths through adults. It is about Narn, a young man in a village of people that is still recent after the Creation. The Maker has Given him the special ability to sense people\u0027s feelings and has Called him to help people when their feelings would turn them from the Maker\u0027s Teachings. Unbeknownst to him, Narn is being 
 pursued by the Blackness, a force from the unMaker trying to undo all that the Maker has Made and that killed Narn\u0027s Father, who also shared Narn\u0027s special sensitivity. Narn discovers that he must choose between the Lessons his Family learned from the Maker and those of the Hunters, seeded from the unMaker. After his best friend, a Hunter, is killed while on a Hunt, Narn\u0027s own emotions blind him, and he ignores his Lessons to avenge his friend\u0027s death. He then finds that he must choose between life and death, not only for the once-revered beast that killed his friend, but also for himself-because his heart has Writings upon it that conflict with what he has seen take place before him. Which is more real? To which will he yield? This book sets the stage for the following series about Narn, which gradually deepens in the question of \u0026quot;choosing whom you will follow,\u0026quot; and gives the reader insight into the Writings on his/her own heart so he/she may
  be better prepared to make that choice. I have a master\u0027s degree in counseling psychology with special emphasis in personal and family challenges. I began to see that the underlying causes of most people\u0027s problems stemmed from a departure from Standards in their lives, consciously or unconsciously. This realization influenced me very strongly and I found myself writing about it in story form.   \u003c/div\u003e    \u003cem\u003e\u003c/em\u003e   \u003c/noscript\u003e   \u003cdiv id\u003d\"outer_postBodyPS\" style\u003d\"overflow: hidden; z-index: 1; height: 0px; display: block;\"\u003e    \u003cdiv id\u003d\"postBodyPS\" style\u003d\"overflow: hidden;\"\u003e     \u003cdiv id\u003d\"bookDesc_iframe_wrapper\" class\u003d\"maxReadableWidth\"\u003e\u003c/div\u003e    \u003c/div\u003e   \u003c/div\u003e   \u003cdiv id\u003d\"psPlaceHolder\" style\u003d\"height: 20px; display: none;\"\u003e    \u003cdiv style\u003d\"z-index: 3;\"\u003e     \u003ca class\u003d\"a-link-expander
  h2-expander\"\u003e \u003ci id\u003d\"bdExpanderIcon\" class\u003d\"a-icon a-icon-expand\"\u003e\u003c/i\u003e \u003cspan id\u003d\"bdSeeAllPrompt\"\u003eRead more\u003c/span\u003e \u003cspan id\u003d\"bdSeeLessPrompt\" style\u003d\"display: none;\"\u003eRead less\u003c/span\u003e \u003c/a\u003e    \u003c/div\u003e   \u003c/div\u003e   \u003cscript type\u003d\"text/javascript\"\u003e P.when(\u0027DynamicIframe\u0027).execute(function(DynamicIframe){\tvar BookDescriptionIframe \u003d null,\t    bookDescEncodedData \u003d \"This%20is%20book%20one%20of%20a%20fantasy%20series%20for%20youths%20through%20adults.%20It%20is%20about%20Narn%2C%20a%20young%20man%20in%20a%20village%20of%20people%20that%20is%20still%20recent%20after%20the%20Creation.%20The%20Maker%20has%20Given%20him%20the%20special%20ability%20to%20sense%20people%27s%20feelings%20and%20has%20Called%20him%20to%20help%20people%20when%20their%20feelings%20would%20turn%20them%20from%20the%20Maker%27s%20Teachings.%20Unbeknownst%20t
 o%20him%2C%20Narn%20is%20being%20pursued%20by%20the%20Blackness%2C%20a%20force%20from%20the%20unMaker%20trying%20to%20undo%20all%20that%20the%20Maker%20has%20Made%20and%20that%20killed%20Narn%27s%20Father%2C%20who%20also%20shared%20Narn%27s%20special%20sensitivity.%20Narn%20discovers%20that%20he%20must%20choose%20between%20the%20Lessons%20his%20Family%20learned%20from%20the%20Maker%20and%20those%20of%20the%20Hunters%2C%20seeded%20from%20the%20unMaker.%20After%20his%20best%20friend%2C%20a%20Hunter%2C%20is%20killed%20while%20on%20a%20Hunt%2C%20Narn%27s%20own%20emotions%20blind%20him%2C%20and%20he%20ignores%20his%20Lessons%20to%20avenge%20his%20friend%27s%20death.%20He%20then%20finds%20that%20he%20must%20choose%20between%20life%20and%20death%2C%20not%20only%20for%20the%20once-revered%20beast%20that%20killed%20his%20friend%2C%20but%20also%20for%20himself-because%20his%20heart%20has%20Writings%20upon%20it%20that%20conflict%20with%20what%20he%20has%20seen%20take%20place%20before%20him.%20
 Which%20is%20more%20real%3F%20To%20which%20will%20he%20yield%3F%20This%20book%20sets%20the%20stage%20for%20the%20following%20series%20about%20Narn%2C%20which%20gradually%20deepens%20in%20the%20question%20of%20%22choosing%20whom%20you%20will%20follow%2C%22%20and%20gives%20the%20reader%20insight%20into%20the%20Writings%20on%20his%2Fher%20own%20heart%20so%20he%2Fshe%20may%20be%20better%20prepared%20to%20make%20that%20choice.%20I%20have%20a%20master%27s%20degree%20in%20counseling%20psychology%20with%20special%20emphasis%20in%20personal%20and%20family%20challenges.%20I%20began%20to%20see%20that%20the%20underlying%20causes%20of%20most%20people%27s%20problems%20stemmed%20from%20a%20departure%20from%20Standards%20in%20their%20lives%2C%20consciously%20or%20unconsciously.%20This%20realization%20influenced%20me%20very%20strongly%20and%20I%20found%20myself%20writing%20about%20it%20in%20story%20form.\",\t    bookDescriptionAvailableHeight,\t    minBookDescriptionInitialHeight \u003d 112,\t    op
 tions \u003d {},\t    iframeId \u003d \"bookDesc_iframe\";     function resizeCallback() {        P.guardFatal(\"bookDescription\", function() {            // Get the line-height of the iframe            var iframe \u003d document.getElementById(iframeId);            var iframeDocument \u003d iframe.contentDocument;            if (false \u0026\u0026 iframeDocument \u0026\u0026 iframeDocument.defaultView) {                // Set the height to the number of lines specified                var numLines \u003d parseInt(0, 10);                 // Get the line-height of the iframe                var iframeContent \u003d iframeDocument.getElementById(\"iframeContent\");                 // Compute the line height                var lineHeight \u003d iframeDocument.defaultView.getComputedStyle(iframeContent, null).getPropertyValue(\"line-height\");                // Parse the line height                lineHeight \u003d parseFloat(lineHeight);                bookDescriptionAvailableHeight \u0
 03d Math.round(lineHeight * numLines);            } else {                var bdOffsetTop \u003d document.getElementById(\"bookDescription_feature_div\").offsetTop;                var imageBlockOffsetTop \u003d document.getElementById(\"booksImageBlock_feature_div\").offsetTop;                var imageBlockHeight \u003d document.getElementById(\"booksImageBlock_feature_div\").offsetHeight;                bookDescriptionAvailableHeight \u003d imageBlockOffsetTop + imageBlockHeight -bdOffsetTop - 30;                if(bookDescriptionAvailableHeight \u003c minBookDescriptionInitialHeight) {                    bookDescriptionAvailableHeight \u003d minBookDescriptionInitialHeight;                }            }    \t\t            var psTotalHeight \u003d document.getElementById(\"postBodyPS\").offsetHeight;             if(psTotalHeight \u003e bookDescriptionAvailableHeight + 30){                if(document.getElementById(\"bdSeeLessPrompt\").style.display \u003d\u003d \"none\"){          
           document.getElementById(\"outer_postBodyPS\").style.height \u003d bookDescriptionAvailableHeight + \u0027px\u0027;                    document.getElementById(\"psPlaceHolder\").style.display \u003d\"block\";                    document.getElementById(\"bdSeeAllPrompt\").style.display \u003d\"block\";                }                else{                    document.getElementById(\"outer_postBodyPS\").style.height \u003d psTotalHeight + \u0027px\u0027;                }            }            else{                document.getElementById(\"outer_postBodyPS\").style.height \u003d psTotalHeight + \u0027px\u0027;                document.getElementById(\"psPlaceHolder\").style.display \u003d\"none\";                document.getElementById(\"bdSeeAllPrompt\").style.display \u003d\"block\";                document.getElementById(\"bdSeeLessPrompt\").style.display \u003d\"none\";                document.getElementById(\"bdExpanderIcon\").className \u003d document.getElementById(\"
 bdExpanderIcon\").className.replace(\"rotate\",\"\");            }        })();    }    options.iframeId \u003d iframeId;    options.iframeWrapperId \u003d \"bookDesc_iframe_wrapper\";\toptions.overriddenCSSId \u003d \"bookDesc_override_CSS\";\toptions.encodedIframeContent \u003d bookDescEncodedData;\toptions.initialResizeCallback \u003d resizeCallback;\t\tBookDescriptionIframe \u003d new DynamicIframe(options);\tP.guardFatal(\"bookDescription\", function() {\t    BookDescriptionIframe.createIframe();\t}) ();\t    \tif ((typeof BookDescriptionIframe !\u003d \u0027undefined\u0027) \u0026\u0026 (BookDescriptionIframe instanceof DynamicIframe)) {\t  P.when(\u0027jQuery\u0027).execute(function($) {\t    $(window).resize(function() {\t        P.guardFatal(\"bookDescription\", function() {\t            BookDescriptionIframe.resizeIframe(resizeCallback);\t        }) ();\t    });\t    $(window).bind(\u0027imageResize\u0027, function() {\t        P.guardFatal(\"bookDescription\", function() 
 {\t            BookDescriptionIframe.resizeIframe(resizeCallback);\t        }) ();\t    });\t  });\t}});\u003c/script\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"edpIngress_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"edpIngress\"\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"heroQuickPromoBooksAtf_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"heroQuickPromoBooksAtf\"\u003e   \u003cdiv id\u003d\"hero-quick-promo\" class\u003d\"a-row a-spacing-medium\"\u003e    \u003chr class\u003d\"a-spacing-medium a-divider-normal\" /\u003e    \u003c!--wlhqp--\u003e    \u003cdiv class\u003d\"qpImage\"\u003e     \u003ca href\u003d\"/gp/redirect.html/ref\u003damb_link_475358582_1/154-5469300-3928621?ie\u003dUTF8\u0026amp;location\u003dhttp%3A%2F%2Fwww.omnivoracious.com%2F\u0026amp;source\u003dstandards\u0026amp;token\u003dDEF1D0758E667C928314D8D6645CBCC4A8DBCD89\u0026amp;pf_rd_m\u003dATVPDKIKX0DER\u0026amp;pf_rd_s\u003dhero-quick-promo-books-atf\u0026amp;pf_rd_r\u003d
 W7WQP2BDJ090798P990F\u0026amp;pf_rd_t\u003d201\u0026amp;pf_rd_p\u003d2546723202\u0026amp;pf_rd_i\u003d1628391340\"\u003e\u003cimg src\u003d\"https://images-na.ssl-images-amazon.com/images/G/01/img15/books/other/17002_books_ABR-hqp_75x75._SL75_CB330842061_.jpg\" width\u003d\"75\" align\u003d\"left\" alt\u003d\"The Amazon Book Review\" height\u003d\"75\" border\u003d\"0\" /\u003e\u003c/a\u003e   \u003c/div\u003e    \u003cspan class\u003d\"qpHeadline\"\u003eThe Amazon Book Review\u003c/span\u003e   \u003cbr /\u003e Author interviews, book reviews, editors picks, and more.    \u003ca href\u003d\"/gp/redirect.html/ref\u003damb_link_475358582_3/154-5469300-3928621?ie\u003dUTF8\u0026amp;location\u003dhttp%3A%2F%2Fwww.omnivoracious.com%2F\u0026amp;source\u003dstandards\u0026amp;token\u003dDEF1D0758E667C928314D8D6645CBCC4A8DBCD89\u0026amp;pf_rd_m\u003dATVPDKIKX0DER\u0026amp;pf_rd_s\u003dhero-quick-promo-books-atf\u0026amp;pf_rd_r\u003dW7WQP2BDJ090798P990F\u0026amp;pf_rd_t\u003d201\u0026amp;p
 f_rd_p\u003d2546723202\u0026amp;pf_rd_i\u003d1628391340\"\u003eRead it now\u003c/a\u003e   \u003c/div\u003e   \u003cdiv style\u003d\"clear:left; margin-bottom:5px\"\u003e\u003c/div\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"promotionalBundle_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"promotionalBundle\"\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"andonCord_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"andonCord\"\u003e  \u003c/div\u003e  \u003cdiv id\u003d\"OosRecommendation_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"OosRecommendation\"\u003e  \u003c/div\u003e \u003c/div\u003e"],"defaultValue":"","isMetaAttribute":false,"extractedValuesCount":1},{"name":"IMAGE","dataType":"STRING","values":[""],"defaultValue":"","isMetaAttribute":false,"extractedValuesCount":0},{"name":"SELLER_INFO","dataType":"STRING","values":[""],"defaultValue":"","isMetaAttribute":false,"extractedValuesCount":0},{"name":"PRODUCT_DETAILS","dataType":"STRING
 ","values":["\u003cdiv id\u003d\"detail-bullets\"\u003e  \u003ca name\u003d\"productDetails\" id\u003d\"productDetails\"\u003e\u003c/a\u003e  \u003chr noshade\u003d\"noshade\" size\u003d\"1\" class\u003d\"bucketDivider\" /\u003e  \u003ctable cellpadding\u003d\"0\" cellspacing\u003d\"0\" border\u003d\"0\" id\u003d\"productDetailsTable\"\u003e   \u003ctbody\u003e   \u003ctr\u003e     \u003ctd class\u003d\"bucket\"\u003e \u003ch2\u003eProduct Details\u003c/h2\u003e      \u003cdiv class\u003d\"content\"\u003e       \u003cul\u003e        \u003cli\u003e\u003cb\u003ePaperback:\u003c/b\u003e 200 pages\u003c/li\u003e        \u003cli\u003e\u003cb\u003ePublisher:\u003c/b\u003e Xulon Press (July 22, 2013)\u003c/li\u003e        \u003cli\u003e\u003cb\u003eLanguage:\u003c/b\u003e English\u003c/li\u003e        \u003cli\u003e\u003cb\u003eISBN-10:\u003c/b\u003e 1628391340\u003c/li\u003e        \u003cli\u003e\u003cb\u003eISBN-13:\u003c/b\u003e 978-1628391343\u003c/li\u003e        \u003cli\u003e\u003cb
 \u003e Product Dimensions: \u003c/b\u003e 6.1 x 0.4 x 9.2 inches \u003c/li\u003e        \u003cli\u003e\u003cb\u003eShipping Weight:\u003c/b\u003e 10.2 ounces (\u003ca href\u003d\"/gp/help/seller/shipping.html/ref\u003ddp_pd_shipping/154-5469300-3928621?ie\u003dUTF8\u0026amp;asin\u003d1628391340\u0026amp;seller\u003dATVPDKIKX0DER\"\u003eView shipping rates and policies\u003c/a\u003e)\u003c/li\u003e        \u003cli\u003e\u003cb\u003eAverage Customer Review:\u003c/b\u003e \u003cscript type\u003d\"text/javascript\"\u003efunction acrPopoverHover(e, h)  {    if(h) window.acrAsinHover \u003d e;    else if(window.acrAsinHover \u003d\u003d e) window.acrAsinHover \u003d null;}function popoverReady(jQuery) {        if((typeof jQuery \u003d\u003d\u003d \"undefined\") || (typeof jQuery.fn \u003d\u003d\u003d \"undefined\")) {            return;        } else if(jQuery.fn.acrPopover) {            return;        }        var popoverConfig \u003d {            showOnHover:true,            showCloseBu
 tton: true,            width: null,            location:\u0027bottom\u0027,            locationAlign:\u0027left\u0027,            locationOffset:[-20,0],            paddingLeft: 15,            paddingBottom: 5,            paddingRight: 15,            group: \u0027reviewsPopover\u0027,            clone:false,            hoverHideDelay:300        };        jQuery.fn.acrPopover \u003d function() {            return this.each(function() {                var $this \u003d jQuery(this);                if(!$this.data(\u0027init\u0027) \u0026\u0026 typeof $this.amazonPopoverTrigger \u003d\u003d\u003d \u0027function\u0027) {                    $this.data(\u0027init\u0027, 1);                    var getargs \u003d $this.attr(\u0027getargs\u0027);                    var ajaxURL \u003d \u0027/gp/customer-reviews/common/du/displayHistoPopAjax.html?\u0027 +                             \u0027\u0026ASIN\u003d\u0027 + $this.attr(\u0027name\u0027) +                             \u0027\u0026link\u003d1\
 u0027 +                             \u0027\u0026seeall\u003d1\u0027 +                             \u0027\u0026ref\u003d\u0027 + $this.attr(\u0027ref\u0027) +                             ((typeof getargs !\u003d \u0027undefined\u0027) ? (\u0027\u0026getargs\u003d\u0027 + getargs) : \u0027\u0027);                    var myConfig \u003d jQuery.extend(true, { destination: ajaxURL }, popoverConfig);                    $this.amazonPopoverTrigger(myConfig);                    var w \u003d window.acrAsinHover;                    if(w \u0026\u0026 jQuery(w).parents(\u0027.asinReviewsSummary\u0027).get(0) \u003d\u003d this) {                        $this.trigger(\u0027mouseover.amzPopover\u0027);                        window.acrAsinHover \u003d null;                    }                }            });        };        window.reviewHistPopoverConfig \u003d popoverConfig;        var jqInit \u003d window.jQueryInitHistoPopovers \u003d function(asin) {            if (typeof jQuery(\u0027.acr-po
 pover[name\u003d\u0027 + asin + \u0027]\u0027).acrPopover \u003d\u003d\u003d \u0027function\u0027) {                jQuery(\u0027.acr-popover[name\u003d\u0027 + asin + \u0027]\u0027).acrPopover();            }        };        window.doInit_average_customer_reviews \u003d jqInit;        window.onAjaxUpdate_average_customer_reviews \u003d jqInit;        window.onCacheUpdate_average_customer_reviews \u003d jqInit;        window.onCacheUpdateReselect_average_customer_reviews \u003d jqInit;        if (typeof P !\u003d\u003d \u0027undefined\u0027) {            P.when(\u0027amznJQ.criticalFeature\u0027).execute(\"amznJQ.criticalFeatureComplete\", function() {                setTimeout(function() { P.register(\u0027acrPopover\u0027, function () {                    return window.arcPopover;                }); }, 10);            });        } else if (typeof amznJQ !\u003d\u003d \u0027undefined\u0027) {            amznJQ.onCompletion(\u0027amznJQ.criticalFeature\u0027, function() {          
       setTimeout(function() { amznJQ.declareAvailable(\u0027acrPopover\u0027); }, 10);            });        }};function acrPopoverReady(jQuery) {    jQuery(\u0027.acr-popover,#searchTemplate .asinReviewsSummary\u0027).each(function() {        if (typeof jQuery(this).acrPopover \u003d\u003d\u003d \u0027function\u0027) {            jQuery(this).acrPopover();        }    });};if (typeof P !\u003d\u003d \u0027undefined\u0027) {    var jq \u003d null;    if (typeof $ !\u003d\u003d \u0027undefined\u0027) {        jq \u003d $;    } else if (typeof jQuery !\u003d\u003d \u0027undefined\u0027) {        jq \u003d jQuery;    }    if (jq) {        P.when(\u0027popover\u0027, \u0027ready\u0027).execute(popoverReady(jq));        P.when(\u0027acrPopover\u0027, \u0027ready\u0027).execute(acrPopoverReady(jq));    }} else if ((typeof amznJQ !\u003d\u003d \u0027undefined\u0027) \u0026\u0026 (typeof jQuery !\u003d\u003d \u0027undefined\u0027)) {    amznJQ.onReady(\u0027popover\u0027, popoverReady, jQue
 ry);    amznJQ.onReady(\u0027acrPopover\u0027, acrPopoverReady, jQuery);};\u003c/script\u003e \u003cspan class\u003d\"crAvgStars\" style\u003d\"white-space:no-wrap;\"\u003e\u003cspan class\u003d\"asinReviewsSummary acr-popover\" name\u003d\"1628391340\" ref\u003d\"dp_db_cm_cr_acr_pop_\"\u003e \u003ca style\u003d\"cursor:pointer;text-decoration:none\" href\u003d\"https://www.amazon.com/Before-First-Day-Stephen-Hale/product-reviews/1628391340/ref\u003ddp_db_cm_cr_acr_img/154-5469300-3928621?ie\u003dUTF8\u0026amp;showViewpoints\u003d1\" name\u003d\"reviewHistoPop_1628391340__star__\" onmouseover\u003d\"return acrPopoverHover(this,1);\" onmouseout\u003d\"return acrPopoverHover(this,0);\"\u003e\u003cspan class\u003d\"swSprite s_star_5_0 \" title\u003d\"5.0 out of 5 stars\"\u003e\u003cspan\u003e5.0 out of 5 stars\u003c/span\u003e\u003c/span\u003e\u0026nbsp;\u003c/a\u003e\u0026nbsp;\u003cspan class\u003d\"histogramButton\" style\u003d\"margin-left:-3px\"\u003e\u003ca style\u003d\"cursor:po
 inter\" href\u003d\"https://www.amazon.com/Before-First-Day-Stephen-Hale/product-reviews/1628391340/ref\u003ddp_db_cm_cr_acr_img/154-5469300-3928621?ie\u003dUTF8\u0026amp;showViewpoints\u003d1\" name\u003d\"reviewHistoPop_1628391340__button__\" onmouseover\u003d\"return acrPopoverHover(this,1);\" onmouseout\u003d\"return acrPopoverHover(this,0);\"\u003e\u003cspan class\u003d\"swSprite s_chevron \"\u003e\u003cspan\u003eSee all reviews\u003c/span\u003e\u003c/span\u003e\u0026nbsp;\u003c/a\u003e\u003c/span\u003e\u003c/span\u003e(\u003ca href\u003d\"https://www.amazon.com/Before-First-Day-Stephen-Hale/product-reviews/1628391340/ref\u003ddp_db_cm_cr_acr_txt/154-5469300-3928621?ie\u003dUTF8\u0026amp;showViewpoints\u003d1\"\u003e1 customer review\u003c/a\u003e)\u003c/span\u003e\u003c/li\u003e        \u003cli id\u003d\"SalesRank\"\u003e \u003cb\u003eAmazon Best Sellers Rank:\u003c/b\u003e #9,398,863 in Books (\u003ca href\u003d\"https://www.amazon.com/best-sellers-books-Amazon/zgbs/books/ref
 \u003dpd_dp_ts_b_1/154-5469300-3928621\"\u003eSee Top 100 in Books\u003c/a\u003e) \u003cstyle type\u003d\"text/css\"\u003e.zg_hrsr { margin: 0; padding: 0; list-style-type: none; }.zg_hrsr_item { margin: 0 0 0 10px; }.zg_hrsr_rank { display: inline-block; width: 80px; text-align: right; }\u003c/style\u003e         \u003cul class\u003d\"zg_hrsr\"\u003e          \u003cli class\u003d\"zg_hrsr_item\"\u003e \u003cspan class\u003d\"zg_hrsr_rank\"\u003e#6112\u003c/span\u003e \u003cspan class\u003d\"zg_hrsr_ladder\"\u003ein\u0026nbsp;\u003ca href\u003d\"https://www.amazon.com/best-sellers-books-Amazon/zgbs/books/ref\u003dpd_zg_hrsr_b_1_1/154-5469300-3928621\"\u003eBooks\u003c/a\u003e \u0026gt; \u003ca href\u003d\"https://www.amazon.com/gp/bestsellers/books/12290/ref\u003dpd_zg_hrsr_b_1_2/154-5469300-3928621\"\u003eChristian Books \u0026amp; Bibles\u003c/a\u003e \u0026gt; \u003ca href\u003d\"https://www.amazon.com/gp/bestsellers/books/172806/ref\u003dpd_zg_hrsr_b_1_3/154-5469300-3928621\"\u0
 03eLiterature \u0026amp; Fiction\u003c/a\u003e \u0026gt; \u003cb\u003e\u003ca href\u003d\"https://www.amazon.com/gp/bestsellers/books/7259433011/ref\u003dpd_zg_hrsr_b_1_4_last/154-5469300-3928621\"\u003eFantasy\u003c/a\u003e\u003c/b\u003e\u003c/span\u003e \u003c/li\u003e          \u003cli class\u003d\"zg_hrsr_item\"\u003e \u003cspan class\u003d\"zg_hrsr_rank\"\u003e#105141\u003c/span\u003e \u003cspan class\u003d\"zg_hrsr_ladder\"\u003ein\u0026nbsp;\u003ca href\u003d\"https://www.amazon.com/best-sellers-books-Amazon/zgbs/books/ref\u003dpd_zg_hrsr_b_2_1/154-5469300-3928621\"\u003eBooks\u003c/a\u003e \u0026gt; \u003ca href\u003d\"https://www.amazon.com/gp/bestsellers/books/17/ref\u003dpd_zg_hrsr_b_2_2/154-5469300-3928621\"\u003eLiterature \u0026amp; Fiction\u003c/a\u003e \u0026gt; \u003ca href\u003d\"https://www.amazon.com/gp/bestsellers/books/10134/ref\u003dpd_zg_hrsr_b_2_3/154-5469300-3928621\"\u003eGenre Fiction\u003c/a\u003e \u0026gt; \u003cb\u003e\u003ca href\u003d\"https://www.am
 azon.com/gp/bestsellers/books/12489/ref\u003dpd_zg_hrsr_b_2_4_last/154-5469300-3928621\"\u003eReligious \u0026amp; Inspirational\u003c/a\u003e\u003c/b\u003e\u003c/span\u003e \u003c/li\u003e          \u003cli class\u003d\"zg_hrsr_item\"\u003e \u003cspan class\u003d\"zg_hrsr_rank\"\u003e#1162374\u003c/span\u003e \u003cspan class\u003d\"zg_hrsr_ladder\"\u003ein\u0026nbsp;\u003ca href\u003d\"https://www.amazon.com/best-sellers-books-Amazon/zgbs/books/ref\u003dpd_zg_hrsr_b_3_1/154-5469300-3928621\"\u003eBooks\u003c/a\u003e \u0026gt; \u003cb\u003e\u003ca href\u003d\"https://www.amazon.com/gp/bestsellers/books/22/ref\u003dpd_zg_hrsr_b_3_2_last/154-5469300-3928621\"\u003eReligion \u0026amp; Spirituality\u003c/a\u003e\u003c/b\u003e\u003c/span\u003e \u003c/li\u003e         \u003c/ul\u003e \u003c/li\u003e       \u003c/ul\u003e       \u003cspan class\u003d\"tiny\"\u003e        \u003cul class\u003d\"noteBullets\"\u003e        \u003c/ul\u003e \u003c/span\u003e      \u003c/div\u003e \u003c/td\u003
 e    \u003c/tr\u003e   \u003c/tbody\u003e \u003c/table\u003e \u003c/div\u003e"],"defaultValue":"","isMetaAttribute":false,"extractedValuesCount":1},{"name":"TECH_DETAILS","dataType":"STRING","values":[""],"defaultValue":"","isMetaAttribute":false,"extractedValuesCount":0},{"name":"CAPTCHA","dataType":"STRING","values":[""],"defaultValue":"","isMetaAttribute":false,"extractedValuesCount":0},{"name":"PRODUCT_IDENTIFIER","dataType":"STRING","values":[""],"defaultValue":"","isMetaAttribute":false,"extractedValuesCount":0},{"name":"BUYBOX","dataType":"STRING","values":["\u003cdiv id\u003d\"unifiedBuyBox_feature_div\" class\u003d\"feature\" data-feature-name\u003d\"unifiedBuyBox\"\u003e  \u003cdiv id\u003d\"combinedBuyBox\" class\u003d\"a-section a-spacing-medium\"\u003e   \u003cform method\u003d\"post\" id\u003d\"addToCart\" action\u003d\"/gp/product/handle-buy-box/ref\u003ddp_start-bbf_1_glance\" class\u003d\"a-content\"\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"session-id\" n
 ame\u003d\"session-id\" value\u003d\"154-5469300-3928621\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"ASIN\" name\u003d\"ASIN\" value\u003d\"1628391340\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"offerListingID\" name\u003d\"offerListingID\" value\u003d\"P0YRnR9szBbdW1hK5VhfhfwyIoQrnhuEd%2BPy%2BBgCbCzNct9GAa8y5i0LbWFzbHdQ3KzbWuQMLfVSXokLOyYlLfXo60wCnhwcYD%2BQpFk1lcQBrvHvvzu1mw%3D%3D\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"isMerchantExclusive\" name\u003d\"isMerchantExclusive\" value\u003d\"0\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"merchantID\" name\u003d\"merchantID\" value\u003d\"ATVPDKIKX0DER\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"isAddon\" name\u003d\"isAddon\" value\u003d\"0\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"nodeID\" name\u003d\"nodeID\" value\u003d\"283155\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"sellingCustomerID\" name\u003d\"sellingCustomerID\" value\u003d\"A2R2RITDJN
 W1Q6\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"qid\" name\u003d\"qid\" value\u003d\"\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"sr\" name\u003d\"sr\" value\u003d\"\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"storeID\" name\u003d\"storeID\" value\u003d\"books\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"tagActionCode\" name\u003d\"tagActionCode\" value\u003d\"283155\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"viewID\" name\u003d\"viewID\" value\u003d\"glance\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"rsid\" name\u003d\"rsid\" value\u003d\"154-5469300-3928621\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"sourceCustomerOrgListID\" name\u003d\"sourceCustomerOrgListID\" value\u003d\"\" /\u003e    \u003cinput type\u003d\"hidden\" id\u003d\"sourceCustomerOrgListItemID\" name\u003d\"sourceCustomerOrgListItemID\" value\u003d\"\" /\u003e    \u003cinput type\u003d\"hidden\" name\u003d\"wlPopCommand\" value\u00
 3d\"\" /\u003e    \u003cdiv id\u003d\"buybox\" class\u003d\"a-row a-spacing-medium\"\u003e     \u003cdiv id\u003d\"rbbContainer\" class\u003d\"a-box-group dp-accordion\"\u003e      \u003cdiv class\u003d\"a-box rbbSection selected dp-accordion-active\"\u003e      \u003cdiv class\u003d\"a-box-inner\"\u003e        \u003cdiv class\u003d\"a-section a-spacing-none a-padding-none\"\u003e         \u003cdiv id\u003d\"buyNewSection\" class\u003d\"rbbHeader dp-accordion-row\"\u003e          \u003ci class\u003d\"a-icon a-icon-radio-active\"\u003e\u003c/i\u003e          \u003ca class\u003d\"a-link-normal rbbHeaderLink\" href\u003d\"/gp/product/1628391340?selectObb\u003dnew\"\u003e \u003ch5\u003e            \u003cdiv class\u003d\"a-row\"\u003e             \u003cdiv class\u003d\"a-column a-span4 a-text-left a-nowrap\"\u003e              \u003cspan class\u003d\"a-text-bold\"\u003eBuy New\u003c/span\u003e             \u003c/div\u003e             \u003cdiv class\u003d\"a-column a-span8 a-text-right a
 -span-last\"\u003e              \u003cdiv class\u003d\"inlineBlock-display\"\u003e               \u003cspan class\u003d\"a-letter-space\"\u003e\u003c/span\u003e               \u003cspan class\u003d\"a-size-medium a-color-price offer-price a-text-normal\"\u003e$15.49\u003c/span\u003e              \u003c/div\u003e             \u003c/div\u003e            \u003c/div\u003e \u003c/h5\u003e \u003c/a\u003e         \u003c/div\u003e         \u003cdiv id\u003d\"buyNewInner\" class\u003d\"rbbContent dp-accordion-inner\"\u003e          \u003cdiv id\u003d\"buyBoxInner\" class\u003d\"a-section a-spacing-none\"\u003e           \u003cdiv class\u003d\"a-row\"\u003e            \u003cdiv class\u003d\"a-column a-span5\"\u003e             \u003cdiv id\u003d\"selectQuantity\" class\u003d\"a-section a-spacing-none a-padding-none\"\u003e              \u003cspan class\u003d\"a-declarative\" data-action\u003d\"quantity-dropdown\" data-quantity-dropdown\u003d\"{}\"\u003e \u003cspan class\u003d\"a-dropdown-cont
 ainer\"\u003e\u003clabel for\u003d\"quantity\" class\u003d\"a-native-dropdown\"\u003eQty:\u003c/label\u003e\u003cselect name\u003d\"quantity\" autocomplete\u003d\"off\" id\u003d\"quantity\" tabindex\u003d\"-1\" class\u003d\"a-native-dropdown\"\u003e \u003coption value\u003d\"1\" selected\u003d\"\"\u003e1 \u003c/option\u003e \u003coption value\u003d\"2\"\u003e2 \u003c/option\u003e \u003coption value\u003d\"3\"\u003e3 \u003c/option\u003e \u003coption value\u003d\"4\"\u003e4 \u003c/option\u003e \u003coption value\u003d\"5\"\u003e5 \u003c/option\u003e \u003coption value\u003d\"6\"\u003e6 \u003c/option\u003e \u003coption value\u003d\"7\"\u003e7 \u003c/option\u003e \u003coption value\u003d\"8\"\u003e8 \u003c/option\u003e \u003coption value\u003d\"9\"\u003e9 \u003c/option\u003e \u003coption value\u003d\"10\"\u003e10 \u003c/option\u003e \u003coption value\u003d\"11\"\u003e11 \u003c/option\u003e \u003coption value\u003d\"12\"\u003e12 \u003c/option\u003e \u003coption value\u003d\"13\"\u003e13
  \u003c/option\u003e \u003coption value\u003d\"14\"\u003e14 \u003c/option\u003e \u003coption value\u003d\"15\"\u003e15 \u003c/option\u003e \u003coption value\u003d\"16\"\u003e16 \u003c/option\u003e \u003coption value\u003d\"17\"\u003e17 \u003c/option\u003e \u003coption value\u003d\"18\"\u003e18 \u003c/option\u003e \u003coption value\u003d\"19\"\u003e19 \u003c/option\u003e \u003coption value\u003d\"20\"\u003e20 \u003c/option\u003e \u003coption value\u003d\"21\"\u003e21 \u003c/option\u003e \u003coption value\u003d\"22\"\u003e22 \u003c/option\u003e \u003coption value\u003d\"23\"\u003e23 \u003c/option\u003e \u003coption value\u003d\"24\"\u003e24 \u003c/option\u003e \u003coption value\u003d\"25\"\u003e25 \u003c/option\u003e \u003coption value\u003d\"26\"\u003e26 \u003c/option\u003e \u003coption value\u003d\"27\"\u003e27 \u003c/option\u003e \u003coption value\u003d\"28\"\u003e28 \u003c/option\u003e \u003coption value\u003d\"29\"\u003e29 \u003c/option\u003e \u003coption value\u003d\"30\"\u
 003e30 \u003c/option\u003e \u003c/select\u003e\u003cspan tabindex\u003d\"-1\" class\u003d\"a-button a-button-dropdown a-button-small\"\u003e\u003cspan class\u003d\"a-button-inner\"\u003e\u003cspan class\u003d\"a-button-text a-declarative\" data-action\u003d\"a-dropdown-button\" role\u003d\"button\" tabindex\u003d\"0\" aria-hidden\u003d\"true\"\u003e\u003cspan class\u003d\"a-dropdown-label\"\u003eQty:\u003c/span\u003e\u003cspan class\u003d\"a-dropdown-prompt\"\u003e1\u003c/span\u003e\u003c/span\u003e\u003ci class\u003d\"a-icon a-icon-dropdown\"\u003e\u003c/i\u003e\u003c/span\u003e\u003c/span\u003e\u003c/span\u003e \u003c/span\u003e             \u003c/div\u003e            \u003c/div\u003e            \u003cdiv class\u003d\"a-column a-span7 a-text-right a-span-last\"\u003e             \u003cul class\u003d\"a-nostyle a-vertical\"\u003e             \u003c/ul\u003e            \u003c/div\u003e           \u003c/div\u003e          \u003c/div\u003e          \u003cdiv class\u003d\"a-section a-s
 pacing-small a-spacing-top-micro\"\u003e           \u003cdiv class\u003d\"a-row\"\u003e            \u003cspan class\u003d\"a-color-base buyboxShippingLabel\"\u003e             \u003c!-- MsgId:cfs_free_shipping_eligible_no_popover_link:web --\u003e \u003ca href\u003d\"/gp/help/customer/display.html/ref\u003dmk_sss_dp_1?ie\u003dUTF8\u0026amp;pop-up\u003d1\u0026amp;nodeId\u003d527692\" target\u003d\"AmazonHelp\" onclick\u003d\"return amz_js_PopWin(this.href,\u0027AmazonHelp\u0027,\u0027width\u003d550,height\u003d550,resizable\u003d1,scrollbars\u003d1,toolbar\u003d0,status\u003d0\u0027);\"\u003eFREE Shipping\u003c/a\u003e on orders with at least $25 of books.\u003c/span\u003e           \u003c/div\u003e          \u003c/div\u003e          \u003cdiv class\u003d\"a-section a-spacing-none\"\u003e          \u003c/div\u003e          \u003cdiv class\u003d\"a-section a-spacing-small\"\u003e           \u003cdiv id\u003d\"availability\" class\u003d\"a-section a-spacing-none\"\u003e            \u00
 3cspan class\u003d\"a-size-medium a-color-success\"\u003e May take an extra 1-2 days to ship. \u003c/span\u003e           \u003c/div\u003e           \u003cdiv class\u003d\"a-section a-spacing-none\"\u003e           \u003c/div\u003e           \u003cdiv id\u003d\"merchant-info\" class\u003d\"a-section a-spacing-mini\"\u003e            Ships from and sold by Amazon.com.            \u003cspan class\u003d\"\"\u003e Gift-wrap available. \u003c/span\u003e           \u003c/div\u003e          \u003c/div\u003e          \u003cdiv id\u003d\"bbopAndCartBox\" class\u003d\"a-box\"\u003e          \u003cdiv class\u003d\"a-box-inner\"\u003e            \u003cstyle type\u003d\"text/css\"\u003e#bbop-sbbop-container {  margin-bottom: 0px;}\u003c/style\u003e            \u003cdiv id\u003d\"bbop-sbbop-container\" class\u003d\"a-section\"\u003e             \u003cdiv id\u003d\"sbbop-popover-header\" class\u003d\"a-section a-hidden\"\u003e              \u003cdiv class\u003d\"a-box a-alert-inline a-alert-inline
 -success\"\u003e               \u003cdiv class\u003d\"a-box-inner a-alert-container\"\u003e                \u003ci class\u003d\"a-icon a-icon-alert\"\u003e\u003c/i\u003e                \u003cdiv class\u003d\"a-color-base\"\u003e                 Before the First Day has been added to your Cart                \u003c/div\u003e               \u003c/div\u003e              \u003c/div\u003e             \u003c/div\u003e            \u003c/div\u003e            \u003cscript type\u003d\"text/javascript\"\u003eif( window.P ){    P.when(\"A\").execute(function(A) {                if (window.sbbopLoaded \u003d\u003d\u003d undefined) {            window.sbbopLoaded \u003d false;        }                function getQueryParameterByName(name) {            name \u003d name.replace(/[\\[]/, \"\\\\[\").replace(/[\\]]/, \"\\\\]\");            var regex \u003d new RegExp(\"[\\\\?\u0026]\" + name + \"\u003d([^\u0026#]*)\"),                results \u003d regex.exec(location.search);            return result
 s \u003d\u003d null ? \"\" : decodeURIComponent(results[1].replace(/\\+/g, \" \"));        }                var hidden, visibilityChange;        if (typeof document.hidden !\u003d\u003d \"undefined\") {             hidden \u003d \"hidden\";            visibilityChange \u003d \"visibilitychange\";        } else if (typeof document.mozHidden !\u003d\u003d \"undefined\") {            hidden \u003d \"mozHidden\";            visibilityChange \u003d \"mozvisibilitychange\";        } else if (typeof document.msHidden !\u003d\u003d \"undefined\") {            hidden \u003d \"msHidden\";            visibilityChange \u003d \"msvisibilitychange\";        } else if (typeof document.webkitHidden !\u003d\u003d \"undefined\") {            hidden \u003d \"webkitHidden\";            visibilityChange \u003d \"webkitvisibilitychange\";        }                var loadFeatures \u003d function() {                                    var $ \u003d A.$,                $container \u003d $(\"#bbop-sbbop-conta
 iner\");            var data \u003d {                \"ASIN\" : \"1628391340\",                \"merchantID\" : \"ATVPDKIKX0DER\",                \"bbopruleID\" : \"Acquisition_AddToCart_PrimeBasicFreeTrialUpsellEligible\",                \"sbbopruleID\" : \"Acquisition_AddToCart_PrimeBasicFreeTrialUpsellEligible\",                \"deliveryOptions\" : \"[next,std-n-us,sss-us,second]\",                \"preorder\" : \"false\",                \"releaseDateDeliveryEligible\" : \"false\"            };            var scope \u003d \"bbopAjaxCall\";                        if(typeof uet \u003d\u003d\u003d \u0027function\u0027) {                uet(\u0027bb\u0027, scope, {wb: 1});            }                        $.get(\"/gp/product/du/bbop-ms3-ajax-endpoint.html\", data, function(result) {                                                                                $container.append(result);                                                    if(typeof uet \u003d\u003d\u003d \u0027func
 tion\u0027 \u0026\u0026 typeof uex \u003d\u003d\u003d \u0027function\u0027) {                                        uet( \"cf\", scope, {wb: 1} );                    uex( \"ld\", scope, {wb: 1} );                }                                                                if (getQueryParameterByName(\"checkBBOP\") \u003d\u003d\u003d \"true\") {                    P.when(\"a-modal\", \"ready\").execute(function(modal) {                      $(\u0027#bbop-check-box\u0027).click();                    });                }                                loadSBBOP();                            });                        var loadSBBOP \u003d function(){                P.when(\"a-modal\", \"ready\").execute(function(modal) {                    if(!window.sbbopLoaded){                        var sbbop_modal \u003d createModal(modal);                        if (sbbop_modal !\u003d\u003d undefined) {                            $(document.body).delegate(\u0027#add-to-cart-button\u0027,\u00
 27click.sbbop\u0027, function(event){                                $(document.body).undelegate(\u0027#add-to-cart-button\u0027,\u0027click.sbbop\u0027);                                var $sbbop \u003d $(\"#sbbop-container\"),                                    $sbbopContent \u003d $sbbop.find(\"#sbbop-popover-content\"),                                    $sbbopHeader \u003d $container.find(\"#sbbop-popover-header\"),                                    $sbbopFooter \u003d $sbbop.find(\"#sbbop-popover-footer\"),                                    isAsinizationCase \u003d $sbbop.find(\".is-sbbop-asinization-usecase\").length \u003e 0,                                    $bbopCheckbox \u003d $(\u0027#bbop-check-box\u0027),                                    $atcButton \u003d $(\u0027#add-to-cart-button\u0027),                                    atcName \u003d $atcButton.attr(\"name\"),                                    hasSingleYesButton \u003d ($(\u0027#sbbop-yes-button\u0027).leng

<TRUNCATED>


[09/11] incubator-griffin git commit: Dsl modify

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/KafkaCacheDirectDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/KafkaCacheDirectDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/KafkaCacheDirectDataConnector.scala
new file mode 100644
index 0000000..70ddcde
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/KafkaCacheDirectDataConnector.scala
@@ -0,0 +1,125 @@
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.data.connector.direct
+//
+//import org.apache.griffin.measure.config.params.user.DataConnectorParam
+//import org.apache.griffin.measure.data.connector.DataConnectorFactory
+//import org.apache.griffin.measure.data.connector.cache.CacheDataConnector
+//import org.apache.griffin.measure.data.connector.streaming.StreamingDataConnector
+//import org.apache.griffin.measure.result._
+//import org.apache.griffin.measure.rule._
+//import org.apache.spark.rdd.RDD
+//import org.apache.spark.sql.SQLContext
+//import org.apache.spark.streaming.StreamingContext
+//
+//import scala.util.{Failure, Success, Try}
+//
+//case class KafkaCacheDirectDataConnector(@transient streamingDataConnectorTry: Try[StreamingDataConnector],
+//                                         cacheDataConnectorTry: Try[CacheDataConnector],
+//                                         dataConnectorParam: DataConnectorParam,
+//                                         ruleExprs: RuleExprs,
+//                                         constFinalExprValueMap: Map[String, Any]
+//                                        ) extends StreamingCacheDirectDataConnector {
+//
+//  val cacheDataConnector: CacheDataConnector = cacheDataConnectorTry match {
+//    case Success(cntr) => cntr
+//    case Failure(ex) => throw ex
+//  }
+//  @transient val streamingDataConnector: StreamingDataConnector = streamingDataConnectorTry match {
+//    case Success(cntr) => cntr
+//    case Failure(ex) => throw ex
+//  }
+//
+//  protected def transform(rdd: RDD[(streamingDataConnector.K, streamingDataConnector.V)],
+//                          ms: Long
+//                         ): RDD[Map[String, Any]] = {
+//    val dataInfoMap = DataInfo.cacheInfoList.map(_.defWrap).toMap + TimeStampInfo.wrap(ms)
+//
+//    rdd.flatMap { kv =>
+//      val msg = kv._2
+//
+//      val cacheExprValueMaps = ExprValueUtil.genExprValueMaps(Some(msg), ruleExprs.cacheExprs, constFinalExprValueMap)
+//      val finalExprValueMaps = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMaps)
+//
+//      finalExprValueMaps.map { vm =>
+//        vm ++ dataInfoMap
+//      }
+//    }
+//  }
+//
+//  def metaData(): Try[Iterable[(String, String)]] = Try {
+//    Map.empty[String, String]
+//  }
+//
+//  def data(): Try[RDD[(Product, (Map[String, Any], Map[String, Any]))]] = Try {
+//    cacheDataConnector.readData match {
+//      case Success(rdd) => {
+//        rdd.flatMap { row =>
+//          val finalExprValueMap = ruleExprs.finalCacheExprs.flatMap { expr =>
+//            row.get(expr._id).flatMap { d =>
+//              Some((expr._id, d))
+//            }
+//          }.toMap
+//
+//          val dataInfoMap: Map[String, Any] = DataInfo.cacheInfoList.map { info =>
+//            row.get(info.key) match {
+//              case Some(d) => (info.key -> d)
+//              case _ => info.defWrap
+//            }
+//          }.toMap
+//
+//          val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
+//            expr.calculate(finalExprValueMap) match {
+//              case Some(v) => Some(v.asInstanceOf[AnyRef])
+//              case _ => None
+//            }
+//          }
+//          val key = toTuple(groupbyData)
+//
+//          Some((key, (finalExprValueMap, dataInfoMap)))
+//        }
+//      }
+//      case Failure(ex) => throw ex
+//    }
+//  }
+//
+//  override def cleanOldData(): Unit = {
+//    cacheDataConnector.cleanOldData
+//  }
+//
+//  override def updateOldData(t: Long, oldData: Iterable[Map[String, Any]]): Unit = {
+//    if (dataConnectorParam.getMatchOnce) {
+//      cacheDataConnector.updateOldData(t, oldData)
+//    }
+//  }
+//
+//  override def updateAllOldData(oldRdd: RDD[Map[String, Any]]): Unit = {
+//    if (dataConnectorParam.getMatchOnce) {
+//      cacheDataConnector.updateAllOldData(oldRdd)
+//    }
+//  }
+//
+//  private def toTuple[A <: AnyRef](as: Seq[A]): Product = {
+//    if (as.size > 0) {
+//      val tupleClass = Class.forName("scala.Tuple" + as.size)
+//      tupleClass.getConstructors.apply(0).newInstance(as: _*).asInstanceOf[Product]
+//    } else None
+//  }
+//
+//}
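Note on the file above: KafkaCacheDirectDataConnector is added fully commented out; it records how a streaming Kafka source and a cache connector were combined under the old rule expressions, including building a group-by key as a scala.TupleN via reflection. A minimal standalone sketch of that key-building trick (my own illustration, not code from the commit):

    object TupleKey {
      // Build a Product key from a group-by value sequence, as the disabled
      // toTuple helper above does: scala.Tuple1..Tuple22 are looked up by name
      // and instantiated reflectively; an empty sequence falls back to None,
      // which is itself a Product.
      def toTuple[A <: AnyRef](as: Seq[A]): Product = {
        if (as.nonEmpty) {
          val tupleClass = Class.forName("scala.Tuple" + as.size)
          tupleClass.getConstructors.apply(0).newInstance(as: _*).asInstanceOf[Product]
        } else None
      }

      def main(args: Array[String]): Unit = {
        println(toTuple(Seq("alice", Integer.valueOf(30))))   // prints (alice,30)
      }
    }

The reflective lookup only works up to Tuple22, the largest tuple arity Scala provides, so group-by key lists longer than that would need a different representation.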

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/StreamingCacheDirectDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/StreamingCacheDirectDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/StreamingCacheDirectDataConnector.scala
new file mode 100644
index 0000000..dddf430
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/StreamingCacheDirectDataConnector.scala
@@ -0,0 +1,60 @@
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.data.connector.direct
+//
+//import org.apache.griffin.measure.data.connector.cache.CacheDataConnector
+//import org.apache.griffin.measure.data.connector.streaming.StreamingDataConnector
+//import org.apache.griffin.measure.result.{DataInfo, TimeStampInfo}
+//import org.apache.griffin.measure.rule.ExprValueUtil
+//import org.apache.spark.rdd.RDD
+//
+//import scala.util.{Failure, Success}
+//
+//trait StreamingCacheDirectDataConnector extends DirectDataConnector {
+//
+//  val cacheDataConnector: CacheDataConnector
+//  @transient val streamingDataConnector: StreamingDataConnector
+//
+//  def available(): Boolean = {
+//    cacheDataConnector.available && streamingDataConnector.available
+//  }
+//
+//  def init(): Unit = {
+//    cacheDataConnector.init
+//
+//    val ds = streamingDataConnector.stream match {
+//      case Success(dstream) => dstream
+//      case Failure(ex) => throw ex
+//    }
+//
+//    ds.foreachRDD((rdd, time) => {
+//      val ms = time.milliseconds
+//
+//      val valueMapRdd = transform(rdd, ms)
+//
+//      // save data frame
+//      cacheDataConnector.saveData(valueMapRdd, ms)
+//    })
+//  }
+//
+//  protected def transform(rdd: RDD[(streamingDataConnector.K, streamingDataConnector.V)],
+//                          ms: Long
+//                         ): RDD[Map[String, Any]]
+//
+//}
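Note on the file above: the disabled StreamingCacheDirectDataConnector trait wires a streaming connector to a cache connector; on init it takes the DStream and, for every micro-batch, transforms the RDD into value maps and saves them keyed by the batch timestamp. A sketch of that wiring (my own illustration; plain function parameters stand in for the streaming and cache connectors):

    import org.apache.spark.rdd.RDD
    import org.apache.spark.streaming.dstream.DStream

    // For each micro-batch: transform the raw (key, value) records into value maps,
    // then hand them to the cache save function together with the batch time.
    def wireStreamToCache[K, V](stream: DStream[(K, V)],
                                transform: (RDD[(K, V)], Long) => RDD[Map[String, Any]],
                                saveData: (RDD[Map[String, Any]], Long) => Unit): Unit = {
      stream.foreachRDD { (rdd, time) =>
        val ms = time.milliseconds            // micro-batch timestamp in milliseconds
        saveData(transform(rdd, ms), ms)      // cache the transformed value maps for this batch
      }
    }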

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/TextDirBatchDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/TextDirBatchDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/TextDirBatchDataConnector.scala
new file mode 100644
index 0000000..abc547b
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/batch/TextDirBatchDataConnector.scala
@@ -0,0 +1,136 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector.batch
+
+import org.apache.griffin.measure.config.params.user.DataConnectorParam
+import org.apache.griffin.measure.process.engine.DqEngines
+import org.apache.griffin.measure.utils.HdfsUtil
+import org.apache.spark.sql.{DataFrame, SQLContext}
+import org.apache.griffin.measure.utils.ParamUtil._
+
+// data connector for text directory files
+case class TextDirBatchDataConnector(sqlContext: SQLContext, dqEngines: DqEngines, dcParam: DataConnectorParam
+                                    ) extends BatchDataConnector {
+
+  val config = dcParam.config
+
+  val DirPath = "dir.path"
+  val DataDirDepth = "data.dir.depth"
+  val SuccessFile = "success.file"
+  val DoneFile = "done.file"
+
+  val dirPath = config.getString(DirPath, "")
+  val dataDirDepth = config.getInt(DataDirDepth, 0)
+  val successFile = config.getString(SuccessFile, "_SUCCESS")
+  val doneFile = config.getString(DoneFile, "_DONE")
+
+  val ignoreFilePrefix = "_"
+
+  private def dirExist(): Boolean = {
+    HdfsUtil.existPath(dirPath)
+  }
+
+  def data(ms: Long): Option[DataFrame] = {
+    try {
+      val dataDirs = listSubDirs(dirPath :: Nil, dataDirDepth, readable)
+      // touch done file for read dirs
+      dataDirs.foreach(dir => touchDone(dir))
+
+      val validDataDirs = dataDirs.filter(dir => !emptyDir(dir))
+
+      if (validDataDirs.size > 0) {
+        val df = sqlContext.read.text(validDataDirs:  _*)
+        val dfOpt = Some(df)
+        val preDfOpt = preProcess(dfOpt, ms)
+        preDfOpt
+      } else {
+        None
+      }
+    } catch {
+      case e: Throwable => {
+        error(s"load text dir ${dirPath} fails: ${e.getMessage}")
+        None
+      }
+    }
+  }
+
+  private def listSubDirs(paths: Seq[String], depth: Int, filteFunc: (String) => Boolean): Seq[String] = {
+    val subDirs = paths.flatMap { path => HdfsUtil.listSubPathsByType(path, "dir", true) }
+    if (depth <= 0) {
+      subDirs.filter(filteFunc)
+    } else {
+      listSubDirs(subDirs, depth - 1, filteFunc)
+    }
+  }
+
+  private def readable(dir: String): Boolean = isSuccess(dir) && !isDone(dir)
+  private def isDone(dir: String): Boolean = HdfsUtil.existFileInDir(dir, doneFile)
+  private def isSuccess(dir: String): Boolean = HdfsUtil.existFileInDir(dir, successFile)
+
+  private def touchDone(dir: String): Unit = HdfsUtil.createEmptyFile(HdfsUtil.getHdfsFilePath(dir, doneFile))
+
+  private def emptyDir(dir: String): Boolean = {
+    HdfsUtil.listSubPathsByType(dir, "file").filter(!_.startsWith(ignoreFilePrefix)).size == 0
+  }
+
+//  def available(): Boolean = {
+//    (!concreteFileFullPath.isEmpty) && fileExist
+//  }
+
+//  def init(): Unit = {}
+
+//  def metaData(): Try[Iterable[(String, String)]] = {
+//    Try {
+//      val st = sqlContext.read.format("com.databricks.spark.avro").load(concreteFileFullPath).schema
+//      st.fields.map(f => (f.name, f.dataType.typeName))
+//    }
+//  }
+
+//  def data(): Try[RDD[(Product, (Map[String, Any], Map[String, Any]))]] = {
+//    Try {
+//      loadDataFile.flatMap { row =>
+//        // generate cache data
+//        val cacheExprValueMaps = ExprValueUtil.genExprValueMaps(Some(row), ruleExprs.cacheExprs, constFinalExprValueMap)
+//        val finalExprValueMaps = ExprValueUtil.updateExprValueMaps(ruleExprs.finalCacheExprs, cacheExprValueMaps)
+//
+//        // data info
+//        val dataInfoMap: Map[String, Any] = DataInfo.cacheInfoList.map { info =>
+//          try {
+//            (info.key -> row.getAs[info.T](info.key))
+//          } catch {
+//            case e: Throwable => info.defWrap
+//          }
+//        }.toMap
+//
+//        finalExprValueMaps.flatMap { finalExprValueMap =>
+//          val groupbyData: Seq[AnyRef] = ruleExprs.groupbyExprs.flatMap { expr =>
+//            expr.calculate(finalExprValueMap) match {
+//              case Some(v) => Some(v.asInstanceOf[AnyRef])
+//              case _ => None
+//            }
+//          }
+//          val key = toTuple(groupbyData)
+//
+//          Some((key, (finalExprValueMap, dataInfoMap)))
+//        }
+//      }
+//    }
+//  }
+
+}
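Note on the file above: unlike most files in this commit part, TextDirBatchDataConnector is live code. It walks dirPath down to dataDirDepth levels, reads only directories that carry a success marker and no done marker, and drops a done marker after reading so a directory is consumed at most once. A local-filesystem sketch of that marker convention (my own illustration; java.nio stands in for the Griffin-internal HdfsUtil helper):

    import java.nio.file.{Files, Path}

    // A directory is readable once the producer wrote _SUCCESS and we have not yet written _DONE.
    def readable(dir: Path, successFile: String = "_SUCCESS", doneFile: String = "_DONE"): Boolean =
      Files.exists(dir.resolve(successFile)) && !Files.exists(dir.resolve(doneFile))

    // Mark the directory as consumed so later runs skip it.
    def touchDone(dir: Path, doneFile: String = "_DONE"): Unit =
      if (!Files.exists(dir.resolve(doneFile))) Files.createFile(dir.resolve(doneFile))

A directory becomes readable once the producer drops the success file into it, and stays skipped on every later run once the connector has dropped the done file.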

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/CacheDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/CacheDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/CacheDataConnector.scala
new file mode 100644
index 0000000..67dcc06
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/CacheDataConnector.scala
@@ -0,0 +1,33 @@
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.data.connector.cache
+//
+//import org.apache.griffin.measure.data.connector.DataConnector
+//import org.apache.spark.rdd.RDD
+//
+//import scala.util.Try
+//
+//trait CacheDataConnector extends DataConnector with DataCacheable with DataUpdatable {
+//
+//  def saveData(rdd: RDD[Map[String, Any]], ms: Long): Unit
+//
+//  def readData(): Try[RDD[Map[String, Any]]]
+//
+//}
+//

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/DataCacheable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/DataCacheable.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/DataCacheable.scala
new file mode 100644
index 0000000..79162be
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/DataCacheable.scala
@@ -0,0 +1,86 @@
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.data.connector.cache
+//
+//import java.util.concurrent.atomic.AtomicLong
+//
+//import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
+//
+//trait DataCacheable {
+//
+//  protected val defCacheInfoPath = PathCounter.genPath
+//
+//  val cacheInfoPath: String
+//  val readyTimeInterval: Long
+//  val readyTimeDelay: Long
+//
+//  def selfCacheInfoPath = s"${TimeInfoCache.infoPath}/${cacheInfoPath}"
+//
+//  def selfCacheTime = TimeInfoCache.cacheTime(selfCacheInfoPath)
+//  def selfLastProcTime = TimeInfoCache.lastProcTime(selfCacheInfoPath)
+//  def selfReadyTime = TimeInfoCache.readyTime(selfCacheInfoPath)
+//  def selfCleanTime = TimeInfoCache.cleanTime(selfCacheInfoPath)
+//
+//  protected def submitCacheTime(ms: Long): Unit = {
+//    val map = Map[String, String]((selfCacheTime -> ms.toString))
+//    InfoCacheInstance.cacheInfo(map)
+//  }
+//
+//  protected def submitReadyTime(ms: Long): Unit = {
+//    val curReadyTime = ms - readyTimeDelay
+//    if (curReadyTime % readyTimeInterval == 0) {
+//      val map = Map[String, String]((selfReadyTime -> curReadyTime.toString))
+//      InfoCacheInstance.cacheInfo(map)
+//    }
+//  }
+//
+//  protected def submitLastProcTime(ms: Long): Unit = {
+//    val map = Map[String, String]((selfLastProcTime -> ms.toString))
+//    InfoCacheInstance.cacheInfo(map)
+//  }
+//
+//  protected def submitCleanTime(ms: Long): Unit = {
+//    val cleanTime = genCleanTime(ms)
+//    val map = Map[String, String]((selfCleanTime -> cleanTime.toString))
+//    InfoCacheInstance.cacheInfo(map)
+//  }
+//
+//  protected def genCleanTime(ms: Long): Long = ms
+//
+//  protected def readCleanTime(): Option[Long] = {
+//    val key = selfCleanTime
+//    val keys = key :: Nil
+//    InfoCacheInstance.readInfo(keys).get(key).flatMap { v =>
+//      try {
+//        Some(v.toLong)
+//      } catch {
+//        case _ => None
+//      }
+//    }
+//  }
+//
+//}
+//
+//object PathCounter {
+//  private val counter: AtomicLong = new AtomicLong(0L)
+//  def genPath(): String = s"path_${increment}"
+//  private def increment(): Long = {
+//    counter.incrementAndGet()
+//  }
+//}
\ No newline at end of file
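Note on the file above: the disabled DataCacheable trait keeps four timestamps per cache path (cache, last-processed, ready, clean) in InfoCacheInstance, and a ready time is only published when the delayed batch time lands exactly on the configured interval. A self-contained sketch of that rule (my own illustration; pure arithmetic, no InfoCacheInstance involved, and with an added guard against a zero interval):

    // Shift the batch time back by readyTimeDelay and publish it only when it falls
    // on a readyTimeInterval boundary, mirroring submitReadyTime above.
    def readyTimeToPublish(ms: Long, readyTimeDelay: Long, readyTimeInterval: Long): Option[Long] = {
      val curReadyTime = ms - readyTimeDelay
      if (readyTimeInterval > 0 && curReadyTime % readyTimeInterval == 0) Some(curReadyTime) else None
    }

    // readyTimeToPublish(ms = 120000L, readyTimeDelay = 60000L, readyTimeInterval = 60000L)
    //   => Some(60000)  -- 60s lands on the 1-minute boundary, so it is published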

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/DataUpdatable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/DataUpdatable.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/DataUpdatable.scala
new file mode 100644
index 0000000..61e8413
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/DataUpdatable.scala
@@ -0,0 +1,30 @@
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.data.connector.cache
+//
+//import org.apache.spark.rdd.RDD
+//
+//trait DataUpdatable {
+//
+//  def cleanOldData(): Unit = {}
+//
+//  def updateOldData(t: Long, oldData: Iterable[Map[String, Any]]): Unit = {}
+//  def updateAllOldData(oldRdd: RDD[Map[String, Any]]): Unit = {}
+//
+//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/HiveCacheDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/HiveCacheDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/HiveCacheDataConnector.scala
new file mode 100644
index 0000000..4c7b45b
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/HiveCacheDataConnector.scala
@@ -0,0 +1,351 @@
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.data.connector.cache
+//
+//import java.util.concurrent.TimeUnit
+//
+//import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
+//import org.apache.griffin.measure.config.params.user.DataCacheParam
+//import org.apache.griffin.measure.result.TimeStampInfo
+//import org.apache.griffin.measure.utils.{HdfsFileDumpUtil, HdfsUtil, JsonUtil, TimeUtil}
+//import org.apache.spark.rdd.RDD
+//import org.apache.spark.sql.SQLContext
+//import org.apache.spark.sql.hive.HiveContext
+//
+//import scala.util.{Success, Try}
+//
+//case class HiveCacheDataConnector(sqlContext: SQLContext, dataCacheParam: DataCacheParam
+//                                 ) extends CacheDataConnector {
+//
+//  if (!sqlContext.isInstanceOf[HiveContext]) {
+//    throw new Exception("hive context not prepared!")
+//  }
+//
+//  val config = dataCacheParam.config
+//  val InfoPath = "info.path"
+//  val cacheInfoPath: String = config.getOrElse(InfoPath, defCacheInfoPath).toString
+//
+//  val newCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.new")
+//  val oldCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.old")
+//
+//  val timeRangeParam: List[String] = if (dataCacheParam.timeRange != null) dataCacheParam.timeRange else Nil
+//  val deltaTimeRange: (Long, Long) = (timeRangeParam ::: List("0", "0")) match {
+//    case s :: e :: _ => {
+//      val ns = TimeUtil.milliseconds(s) match {
+//        case Some(n) if (n < 0) => n
+//        case _ => 0
+//      }
+//      val ne = TimeUtil.milliseconds(e) match {
+//        case Some(n) if (n < 0) => n
+//        case _ => 0
+//      }
+//      (ns, ne)
+//    }
+//    case _ => (0, 0)
+//  }
+//
+//  val Database = "database"
+//  val database: String = config.getOrElse(Database, "").toString
+//  val TableName = "table.name"
+//  val tableName: String = config.get(TableName) match {
+//    case Some(s: String) if (s.nonEmpty) => s
+//    case _ => throw new Exception("invalid table.name!")
+//  }
+//  val ParentPath = "parent.path"
+//  val parentPath: String = config.get(ParentPath) match {
+//    case Some(s: String) => s
+//    case _ => throw new Exception("invalid parent.path!")
+//  }
+//  val tablePath = HdfsUtil.getHdfsFilePath(parentPath, tableName)
+//
+//  val concreteTableName = if (dbPrefix) s"${database}.${tableName}" else tableName
+//
+//  val ReadyTimeInterval = "ready.time.interval"
+//  val ReadyTimeDelay = "ready.time.delay"
+//  val readyTimeInterval: Long = TimeUtil.milliseconds(config.getOrElse(ReadyTimeInterval, "1m").toString).getOrElse(60000L)
+//  val readyTimeDelay: Long = TimeUtil.milliseconds(config.getOrElse(ReadyTimeDelay, "1m").toString).getOrElse(60000L)
+//
+//  val TimeStampColumn: String = TimeStampInfo.key
+//  val PayloadColumn: String = "payload"
+//
+////  type Schema = (Long, String)
+//  val schema: List[(String, String)] = List(
+//    (TimeStampColumn, "bigint"),
+//    (PayloadColumn, "string")
+//  )
+//  val schemaName = schema.map(_._1)
+//
+////  type Partition = (Long, Long)
+//  val partition: List[(String, String, String)] = List(
+//    ("hr", "bigint", "hour"),
+//    ("min", "bigint", "min")
+//  )
+//  val partitionName = partition.map(_._1)
+//
+//  private val fieldSep = """|"""
+//  private val rowSep = """\n"""
+//  private val rowSepLiteral = "\n"
+//
+//  private def dbPrefix(): Boolean = {
+//    database.nonEmpty && !database.equals("default")
+//  }
+//
+//  private def tableExists(): Boolean = {
+//    Try {
+//      if (dbPrefix) {
+//        sqlContext.tables(database).filter(tableExistsSql).collect.size
+//      } else {
+//        sqlContext.tables().filter(tableExistsSql).collect.size
+//      }
+//    } match {
+//      case Success(s) => s > 0
+//      case _ => false
+//    }
+//  }
+//
+//  override def init(): Unit = {
+//    try {
+//      if (tableExists) {
+//        // drop exist table
+//        val dropSql = s"""DROP TABLE ${concreteTableName}"""
+//        sqlContext.sql(dropSql)
+//      }
+//
+//      val colsSql = schema.map { field =>
+//        s"`${field._1}` ${field._2}"
+//      }.mkString(", ")
+//      val partitionsSql = partition.map { partition =>
+//        s"`${partition._1}` ${partition._2}"
+//      }.mkString(", ")
+//      val sql = s"""CREATE EXTERNAL TABLE IF NOT EXISTS ${concreteTableName}
+//                    |(${colsSql}) PARTITIONED BY (${partitionsSql})
+//                    |ROW FORMAT DELIMITED
+//                    |FIELDS TERMINATED BY '${fieldSep}'
+//                    |LINES TERMINATED BY '${rowSep}'
+//                    |STORED AS TEXTFILE
+//                    |LOCATION '${tablePath}'""".stripMargin
+//      sqlContext.sql(sql)
+//    } catch {
+//      case e: Throwable => throw e
+//    }
+//  }
+//
+//  def available(): Boolean = {
+//    true
+//  }
+//
+//  private def encode(data: Map[String, Any], ms: Long): Option[List[Any]] = {
+//    try {
+//      Some(schema.map { field =>
+//        val (name, _) = field
+//        name match {
+//          case TimeStampColumn => ms
+//          case PayloadColumn => JsonUtil.toJson(data)
+//          case _ => null
+//        }
+//      })
+//    } catch {
+//      case _ => None
+//    }
+//  }
+//
+//  private def decode(data: List[Any], updateTimeStamp: Boolean): Option[Map[String, Any]] = {
+//    val dataMap = schemaName.zip(data).toMap
+//    dataMap.get(PayloadColumn) match {
+//      case Some(v: String) => {
+//        try {
+//          val map = JsonUtil.toAnyMap(v)
+//          val resMap = if (updateTimeStamp) {
+//            dataMap.get(TimeStampColumn) match {
+//              case Some(t) => map + (TimeStampColumn -> t)
+//              case _ => map
+//            }
+//          } else map
+//          Some(resMap)
+//        } catch {
+//          case _ => None
+//        }
+//      }
+//      case _ => None
+//    }
+//  }
+//
+//  def saveData(rdd: RDD[Map[String, Any]], ms: Long): Unit = {
+//    val newCacheLocked = newCacheLock.lock(-1, TimeUnit.SECONDS)
+//    if (newCacheLocked) {
+//      try {
+//        val ptns = getPartition(ms)
+//        val ptnsPath = genPartitionHdfsPath(ptns)
+//        val dirPath = s"${tablePath}/${ptnsPath}"
+//        val fileName = s"${ms}"
+//        val filePath = HdfsUtil.getHdfsFilePath(dirPath, fileName)
+//
+//        // encode data
+//        val dataRdd: RDD[List[Any]] = rdd.flatMap(encode(_, ms))
+//
+//        // save data
+//        val recordRdd: RDD[String] = dataRdd.map { dt =>
+//          dt.map(_.toString).mkString(fieldSep)
+//        }
+//
+//        val dumped = if (!recordRdd.isEmpty) {
+//          HdfsFileDumpUtil.dump(filePath, recordRdd, rowSepLiteral)
+//        } else false
+//
+//        // add partition
+//        if (dumped) {
+//          val sql = addPartitionSql(concreteTableName, ptns)
+//          sqlContext.sql(sql)
+//        }
+//
+//        // submit ms
+//        submitCacheTime(ms)
+//        submitReadyTime(ms)
+//      } catch {
+//        case e: Throwable => error(s"save data error: ${e.getMessage}")
+//      } finally {
+//        newCacheLock.unlock()
+//      }
+//    }
+//  }
+//
+//  def readData(): Try[RDD[Map[String, Any]]] = Try {
+//    val timeRange = TimeInfoCache.getTimeRange
+//    submitLastProcTime(timeRange._2)
+//
+//    val reviseTimeRange = (timeRange._1 + deltaTimeRange._1, timeRange._2 + deltaTimeRange._2)
+//    submitCleanTime(reviseTimeRange._1)
+//
+//    // read directly through partition info
+//    val partitionRange = getPartitionRange(reviseTimeRange._1, reviseTimeRange._2)
+//    val sql = selectSql(concreteTableName, partitionRange)
+//    val df = sqlContext.sql(sql)
+//
+//    // decode data
+//    df.flatMap { row =>
+//      val dt = schemaName.map { sn =>
+//        row.getAs[Any](sn)
+//      }
+//      decode(dt, true)
+//    }
+//  }
+//
+//  override def cleanOldData(): Unit = {
+//    val oldCacheLocked = oldCacheLock.lock(-1, TimeUnit.SECONDS)
+//    if (oldCacheLocked) {
+//      try {
+//        val cleanTime = readCleanTime()
+//        cleanTime match {
+//          case Some(ct) => {
+//            // drop partition
+//            val bound = getPartition(ct)
+//            val sql = dropPartitionSql(concreteTableName, bound)
+//            sqlContext.sql(sql)
+//          }
+//          case _ => {
+//            // do nothing
+//          }
+//        }
+//      } catch {
+//        case e: Throwable => error(s"clean old data error: ${e.getMessage}")
+//      } finally {
+//        oldCacheLock.unlock()
+//      }
+//    }
+//  }
+//
+//  override def updateOldData(t: Long, oldData: Iterable[Map[String, Any]]): Unit = {
+//    // parallel process different time groups, lock is unnecessary
+//    val ptns = getPartition(t)
+//    val ptnsPath = genPartitionHdfsPath(ptns)
+//    val dirPath = s"${tablePath}/${ptnsPath}"
+//    val fileName = s"${t}"
+//    val filePath = HdfsUtil.getHdfsFilePath(dirPath, fileName)
+//
+//    try {
+//      // remove out time old data
+//      HdfsFileDumpUtil.remove(dirPath, fileName, true)
+//
+//      // save updated old data
+//      if (oldData.size > 0) {
+//        val recordDatas = oldData.flatMap { dt =>
+//          encode(dt, t)
+//        }
+//        val records: Iterable[String] = recordDatas.map { dt =>
+//          dt.map(_.toString).mkString(fieldSep)
+//        }
+//        val dumped = HdfsFileDumpUtil.dump(filePath, records, rowSepLiteral)
+//      }
+//    } catch {
+//      case e: Throwable => error(s"update old data error: ${e.getMessage}")
+//    }
+//  }
+//
+//  override protected def genCleanTime(ms: Long): Long = {
+//    val minPartition = partition.last
+//    val t1 = TimeUtil.timeToUnit(ms, minPartition._3)
+//    val t2 = TimeUtil.timeFromUnit(t1, minPartition._3)
+//    t2
+//  }
+//
+//  private def getPartition(ms: Long): List[(String, Any)] = {
+//    partition.map { p =>
+//      val (name, _, unit) = p
+//      val t = TimeUtil.timeToUnit(ms, unit)
+//      (name, t)
+//    }
+//  }
+//  private def getPartitionRange(ms1: Long, ms2: Long): List[(String, (Any, Any))] = {
+//    partition.map { p =>
+//      val (name, _, unit) = p
+//      val t1 = TimeUtil.timeToUnit(ms1, unit)
+//      val t2 = TimeUtil.timeToUnit(ms2, unit)
+//      (name, (t1, t2))
+//    }
+//  }
+//
+//  private def genPartitionHdfsPath(partition: List[(String, Any)]): String = {
+//    partition.map(prtn => s"${prtn._1}=${prtn._2}").mkString("/")
+//  }
+//  private def addPartitionSql(tbn: String, partition: List[(String, Any)]): String = {
+//    val partitionSql = partition.map(ptn => (s"`${ptn._1}` = ${ptn._2}")).mkString(", ")
+//    val sql = s"""ALTER TABLE ${tbn} ADD IF NOT EXISTS PARTITION (${partitionSql})"""
+//    sql
+//  }
+//  private def selectSql(tbn: String, partitionRange: List[(String, (Any, Any))]): String = {
+//    val clause = partitionRange.map { pr =>
+//      val (name, (r1, r2)) = pr
+//      s"""`${name}` BETWEEN '${r1}' and '${r2}'"""
+//    }.mkString(" AND ")
+//    val whereClause = if (clause.nonEmpty) s"WHERE ${clause}" else ""
+//    val sql = s"""SELECT * FROM ${tbn} ${whereClause}"""
+//    sql
+//  }
+//  private def dropPartitionSql(tbn: String, partition: List[(String, Any)]): String = {
+//    val partitionSql = partition.map(ptn => (s"PARTITION ( `${ptn._1}` < '${ptn._2}' ) ")).mkString(", ")
+//    val sql = s"""ALTER TABLE ${tbn} DROP ${partitionSql}"""
+//    println(sql)
+//    sql
+//  }
+//
+//  private def tableExistsSql(): String = {
+//    s"tableName LIKE '${tableName}'"
+//  }
+//
+//}
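Note on the file above: the disabled HiveCacheDataConnector stores each micro-batch as a delimited text file under an (hr, min) partition directory and registers or drops partitions through generated HiveQL. A standalone sketch of the ADD PARTITION statement shape it builds (my own illustration; the table name and partition values are made up):

    // Generate the ALTER TABLE ... ADD PARTITION statement for one partition tuple,
    // mirroring the addPartitionSql helper above.
    def addPartitionSql(tableName: String, partition: List[(String, Any)]): String = {
      val partitionSql = partition.map { case (name, value) => s"`$name` = $value" }.mkString(", ")
      s"ALTER TABLE $tableName ADD IF NOT EXISTS PARTITION ($partitionSql)"
    }

    // addPartitionSql("griffin_cache", List(("hr", 422501L), ("min", 25350099L)))
    //   => ALTER TABLE griffin_cache ADD IF NOT EXISTS PARTITION (`hr` = 422501, `min` = 25350099)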

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/TextCacheDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/TextCacheDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/TextCacheDataConnector.scala
new file mode 100644
index 0000000..0daf2d9
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/cache/TextCacheDataConnector.scala
@@ -0,0 +1,311 @@
+///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//package org.apache.griffin.measure.data.connector.cache
+//
+//import java.util.concurrent.TimeUnit
+//
+//import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
+//import org.apache.griffin.measure.config.params.user.DataCacheParam
+//import org.apache.griffin.measure.result.TimeStampInfo
+//import org.apache.griffin.measure.utils.{HdfsFileDumpUtil, HdfsUtil, JsonUtil, TimeUtil}
+//import org.apache.spark.rdd.RDD
+//import org.apache.spark.sql.SQLContext
+//
+//import scala.util.Try
+//
+//case class TextCacheDataConnector(sqlContext: SQLContext, dataCacheParam: DataCacheParam
+//                                 ) extends CacheDataConnector {
+//
+//  val config = dataCacheParam.config
+//  val InfoPath = "info.path"
+//  val cacheInfoPath: String = config.getOrElse(InfoPath, defCacheInfoPath).toString
+//
+//  val newCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.new")
+//  val oldCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.old")
+//
+//  val timeRangeParam: List[String] = if (dataCacheParam.timeRange != null) dataCacheParam.timeRange else Nil
+//  val deltaTimeRange: (Long, Long) = (timeRangeParam ::: List("0", "0")) match {
+//    case s :: e :: _ => {
+//      val ns = TimeUtil.milliseconds(s) match {
+//        case Some(n) if (n < 0) => n
+//        case _ => 0
+//      }
+//      val ne = TimeUtil.milliseconds(e) match {
+//        case Some(n) if (n < 0) => n
+//        case _ => 0
+//      }
+//      (ns, ne)
+//    }
+//    case _ => (0, 0)
+//  }
+//
+//  val FilePath = "file.path"
+//  val filePath: String = config.get(FilePath) match {
+//    case Some(s: String) => s
+//    case _ => throw new Exception("invalid file.path!")
+//  }
+//
+//  val ReadyTimeInterval = "ready.time.interval"
+//  val ReadyTimeDelay = "ready.time.delay"
+//  val readyTimeInterval: Long = TimeUtil.milliseconds(config.getOrElse(ReadyTimeInterval, "1m").toString).getOrElse(60000L)
+//  val readyTimeDelay: Long = TimeUtil.milliseconds(config.getOrElse(ReadyTimeDelay, "1m").toString).getOrElse(60000L)
+//
+////  val TimeStampColumn: String = TimeStampInfo.key
+////  val PayloadColumn: String = "payload"
+//
+//  // cache schema: Long, String
+////  val fields = List[StructField](
+////    StructField(TimeStampColumn, LongType),
+////    StructField(PayloadColumn, StringType)
+////  )
+////  val schema = StructType(fields)
+//
+//  //  case class CacheData(time: Long, payload: String) {
+//  //    def getTime(): Long = time
+//  //    def getPayload(): String = payload
+//  //  }
+//
+//  private val rowSepLiteral = "\n"
+//
+//  val partitionUnits: List[String] = List("hour", "min")
+//
+//  override def init(): Unit = {
+//    // do nothing
+//  }
+//
+//  def available(): Boolean = {
+//    true
+//  }
+//
+//  private def encode(data: Map[String, Any], ms: Long): Option[String] = {
+//    try {
+//      val map = data + (TimeStampInfo.key -> ms)
+//      Some(JsonUtil.toJson(map))
+//    } catch {
+//      case _: Throwable => None
+//    }
+//  }
+//
+//  private def decode(data: String): Option[Map[String, Any]] = {
+//    try {
+//      Some(JsonUtil.toAnyMap(data))
+//    } catch {
+//      case _: Throwable => None
+//    }
+//  }
+//
+//  def saveData(rdd: RDD[Map[String, Any]], ms: Long): Unit = {
+//    val newCacheLocked = newCacheLock.lock(-1, TimeUnit.SECONDS)
+//    if (newCacheLocked) {
+//      try {
+//        val ptns = getPartition(ms)
+//        val ptnsPath = genPartitionHdfsPath(ptns)
+//        val dirPath = s"${filePath}/${ptnsPath}"
+//        val dataFileName = s"${ms}"
+//        val dataFilePath = HdfsUtil.getHdfsFilePath(dirPath, dataFileName)
+//
+//        // encode data
+//        val dataRdd: RDD[String] = rdd.flatMap(encode(_, ms))
+//
+//        // save data
+//        val dumped = if (!dataRdd.isEmpty) {
+//          HdfsFileDumpUtil.dump(dataFilePath, dataRdd, rowSepLiteral)
+//        } else false
+//
+//        // submit ms
+//        submitCacheTime(ms)
+//        submitReadyTime(ms)
+//      } catch {
+//        case e: Throwable => error(s"save data error: ${e.getMessage}")
+//      } finally {
+//        newCacheLock.unlock()
+//      }
+//    }
+//  }
+//
+//  def readData(): Try[RDD[Map[String, Any]]] = Try {
+//    val timeRange = TimeInfoCache.getTimeRange
+//    submitLastProcTime(timeRange._2)
+//
+//    val reviseTimeRange = (timeRange._1 + deltaTimeRange._1, timeRange._2 + deltaTimeRange._2)
+//    submitCleanTime(reviseTimeRange._1)
+//
+//    // read directly through partition info
+//    val partitionRanges = getPartitionRange(reviseTimeRange._1, reviseTimeRange._2)
+//    println(s"read time ranges: ${reviseTimeRange}")
+//    println(s"read partition ranges: ${partitionRanges}")
+//
+//    // list partition paths
+//    val partitionPaths = listPathsBetweenRanges(filePath :: Nil, partitionRanges)
+//
+//    if (partitionPaths.isEmpty) {
+//      sqlContext.sparkContext.emptyRDD[Map[String, Any]]
+//    } else {
+//      val filePaths = partitionPaths.mkString(",")
+//      val rdd = sqlContext.sparkContext.textFile(filePaths)
+//
+//      // decode data
+//      rdd.flatMap { row =>
+//        decode(row)
+//      }
+//    }
+//  }
+//
+//  override def cleanOldData(): Unit = {
+//    val oldCacheLocked = oldCacheLock.lock(-1, TimeUnit.SECONDS)
+//    if (oldCacheLocked) {
+//      try {
+//        val cleanTime = readCleanTime()
+//        cleanTime match {
+//          case Some(ct) => {
+//            // drop partitions
+//            val bounds = getPartition(ct)
+//
+//            // list partition paths
+//            val earlierPaths = listPathsEarlierThanBounds(filePath :: Nil, bounds)
+//
+//            // delete out time data path
+//            earlierPaths.foreach { path =>
+//              println(s"delete hdfs path: ${path}")
+//              HdfsUtil.deleteHdfsPath(path)
+//            }
+//          }
+//          case _ => {
+//            // do nothing
+//          }
+//        }
+//      } catch {
+//        case e: Throwable => error(s"clean old data error: ${e.getMessage}")
+//      } finally {
+//        oldCacheLock.unlock()
+//      }
+//    }
+//  }
+//
+//  override def updateOldData(t: Long, oldData: Iterable[Map[String, Any]]): Unit = {
+//    // parallel process different time groups, lock is unnecessary
+//    val ptns = getPartition(t)
+//    val ptnsPath = genPartitionHdfsPath(ptns)
+//    val dirPath = s"${filePath}/${ptnsPath}"
+//    val dataFileName = s"${t}"
+//    val dataFilePath = HdfsUtil.getHdfsFilePath(dirPath, dataFileName)
+//
+//    try {
+//      // remove out time old data
+//      HdfsFileDumpUtil.remove(dirPath, dataFileName, true)
+//
+//      // save updated old data
+//      if (oldData.size > 0) {
+//        val recordDatas = oldData.flatMap { dt =>
+//          encode(dt, t)
+//        }
+//        val dumped = HdfsFileDumpUtil.dump(dataFilePath, recordDatas, rowSepLiteral)
+//      }
+//    } catch {
+//      case e: Throwable => error(s"update old data error: ${e.getMessage}")
+//    }
+//  }
+//
+//  override protected def genCleanTime(ms: Long): Long = {
+//    val minPartitionUnit = partitionUnits.last
+//    val t1 = TimeUtil.timeToUnit(ms, minPartitionUnit)
+//    val t2 = TimeUtil.timeFromUnit(t1, minPartitionUnit)
+//    t2
+//  }
+//
+//  private def getPartition(ms: Long): List[Long] = {
+//    partitionUnits.map { unit =>
+//      TimeUtil.timeToUnit(ms, unit)
+//    }
+//  }
+//  private def getPartitionRange(ms1: Long, ms2: Long): List[(Long, Long)] = {
+//    partitionUnits.map { unit =>
+//      val t1 = TimeUtil.timeToUnit(ms1, unit)
+//      val t2 = TimeUtil.timeToUnit(ms2, unit)
+//      (t1, t2)
+//    }
+//  }
+//
+//  private def genPartitionHdfsPath(partition: List[Long]): String = {
+//    partition.map(prtn => s"${prtn}").mkString("/")
+//  }
+//
+//  private def str2Long(str: String): Option[Long] = {
+//    try {
+//      Some(str.toLong)
+//    } catch {
+//      case e: Throwable => None
+//    }
+//  }
+//
+//  // here the range means [min, max], but the best range should be (min, max]
+//  private def listPathsBetweenRanges(paths: List[String],
+//                                     partitionRanges: List[(Long, Long)]
+//                                    ): List[String] = {
+//    partitionRanges match {
+//      case Nil => paths
+//      case head :: tail => {
+//        val (lb, ub) = head
+//        val curPaths = paths.flatMap { path =>
+//          val names = HdfsUtil.listSubPaths(path, "dir").toList
+//          names.filter { name =>
+//            str2Long(name) match {
+//              case Some(t) => (t >= lb) && (t <= ub)
+//              case _ => false
+//            }
+//          }.map(HdfsUtil.getHdfsFilePath(path, _))
+//        }
+//        listPathsBetweenRanges(curPaths, tail)
+//      }
+//    }
+//  }
+//
+//  private def listPathsEarlierThanBounds(paths: List[String], bounds: List[Long]
+//                                        ): List[String] = {
+//    bounds match {
+//      case Nil => paths
+//      case head :: tail => {
+//        val earlierPaths = paths.flatMap { path =>
+//          val names = HdfsUtil.listSubPaths(path, "dir").toList
+//          names.filter { name =>
+//            str2Long(name) match {
+//              case Some(t) => (t < head)
+//              case _ => false
+//            }
+//          }.map(HdfsUtil.getHdfsFilePath(path, _))
+//        }
+//        val equalPaths = paths.flatMap { path =>
+//          val names = HdfsUtil.listSubPaths(path, "dir").toList
+//          names.filter { name =>
+//            str2Long(name) match {
+//              case Some(t) => (t == head)
+//              case _ => false
+//            }
+//          }.map(HdfsUtil.getHdfsFilePath(path, _))
+//        }
+//
+//        tail match {
+//          case Nil => earlierPaths
+//          case _ => earlierPaths ::: listPathsEarlierThanBounds(equalPaths, tail)
+//        }
+//      }
+//    }
+//  }
+//
+//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/KafkaStreamingDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/KafkaStreamingDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/KafkaStreamingDataConnector.scala
new file mode 100644
index 0000000..41de217
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/KafkaStreamingDataConnector.scala
@@ -0,0 +1,70 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector.streaming
+
+import kafka.serializer.Decoder
+import org.apache.spark.streaming.dstream.InputDStream
+
+import scala.util.{Failure, Success, Try}
+import org.apache.griffin.measure.utils.ParamUtil._
+
+trait KafkaStreamingDataConnector extends StreamingDataConnector {
+
+  type KD <: Decoder[K]
+  type VD <: Decoder[V]
+
+  val config = dcParam.config
+
+  val KafkaConfig = "kafka.config"
+  val Topics = "topics"
+
+  val kafkaConfig = config.getAnyRef(KafkaConfig, Map[String, String]())
+  val topics = config.getString(Topics, "")
+
+  def available(): Boolean = {
+    true
+  }
+
+  def init(): Unit = {
+    val ds = stream match {
+      case Success(dstream) => dstream
+      case Failure(ex) => throw ex
+    }
+    ds.foreachRDD((rdd, time) => {
+      val ms = time.milliseconds
+
+      val dfOpt = transform(rdd)
+
+      val preDfOpt = preProcess(dfOpt, ms)
+
+      // save data frame
+      dataSourceCacheOpt.foreach(_.saveData(preDfOpt, ms))
+    })
+  }
+
+  def stream(): Try[InputDStream[(K, V)]] = Try {
+    val topicSet = topics.split(",").toSet
+    createDStream(topicSet)
+  }
+
+  protected def createDStream(topicSet: Set[String]): InputDStream[(K, V)]
+}
+
+
+
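
Note on configuration: the trait above only pulls two things out of the connector config, a "kafka.config" sub-map that is handed straight to the Kafka direct stream and a comma-separated "topics" string. As a rough sketch (only the two top-level keys come from the code above; the inner Kafka properties and topic names are illustrative placeholders for a 0.8-style direct stream), the relevant part of a connector config could look like:

  val connectorConfig: Map[String, Any] = Map(
    "kafka.config" -> Map(
      "bootstrap.servers" -> "localhost:9092",   // placeholder broker list
      "group.id"          -> "griffin-measure"   // placeholder consumer group
    ),
    "topics" -> "events,events_backup"           // split on "," in stream()
  )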

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/KafkaStreamingStringDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/KafkaStreamingStringDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/KafkaStreamingStringDataConnector.scala
new file mode 100644
index 0000000..5e0413e
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/KafkaStreamingStringDataConnector.scala
@@ -0,0 +1,65 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector.streaming
+
+import kafka.serializer.StringDecoder
+import org.apache.griffin.measure.config.params.user.DataConnectorParam
+import org.apache.griffin.measure.process.engine.DqEngines
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}
+import org.apache.spark.sql.{DataFrame, Row, SQLContext}
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.streaming.dstream.InputDStream
+import org.apache.spark.streaming.kafka.KafkaUtils
+import org.apache.spark.sql.functions.lit
+
+case class KafkaStreamingStringDataConnector(sqlContext: SQLContext,
+                                             @transient ssc: StreamingContext,
+                                             dqEngines: DqEngines,
+                                             dcParam: DataConnectorParam
+                                            ) extends KafkaStreamingDataConnector {
+  type K = String
+  type KD = StringDecoder
+  type V = String
+  type VD = StringDecoder
+
+  val valueColName = "value"
+  val schema = StructType(Array(
+    StructField(valueColName, StringType)
+  ))
+
+  def createDStream(topicSet: Set[String]): InputDStream[(K, V)] = {
+    KafkaUtils.createDirectStream[K, V, KD, VD](ssc, kafkaConfig, topicSet)
+  }
+
+  def transform(rdd: RDD[(K, V)]): Option[DataFrame] = {
+    if (rdd.isEmpty) None else {
+      try {
+        val rowRdd = rdd.map(d => Row(d._2))
+        val df = sqlContext.createDataFrame(rowRdd, schema)
+        Some(df)
+      } catch {
+        case e: Throwable => {
+          error(s"streaming data transform fails: ${e.getMessage}")
+          None
+        }
+      }
+    }
+  }
+}
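
For illustration, transform above wraps each Kafka message value into a one-column data frame named "value", leaving payload parsing to later rule steps. A minimal sketch of the same construction, assuming a live SQLContext named sqlContext and two made-up JSON payloads:

  import org.apache.spark.sql.Row
  import org.apache.spark.sql.types.{StringType, StructField, StructType}

  val schema = StructType(Array(StructField("value", StringType)))
  val batch = sqlContext.sparkContext.parallelize(Seq(
    ("k1", """{"name":"emily","age":25}"""),   // (key, value) pairs as delivered by the direct stream
    ("k2", """{"name":"alex","age":35}""")
  ))
  val df = sqlContext.createDataFrame(batch.map { case (_, v) => Row(v) }, schema)
  // df now holds the raw message strings in its single "value" column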

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/StreamingDataConnector.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/StreamingDataConnector.scala b/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/StreamingDataConnector.scala
new file mode 100644
index 0000000..cc21761
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/connector/streaming/StreamingDataConnector.scala
@@ -0,0 +1,43 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.connector.streaming
+
+import org.apache.griffin.measure.data.connector._
+import org.apache.griffin.measure.data.source.DataSourceCache
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
+import org.apache.spark.streaming.dstream.InputDStream
+
+import scala.util.Try
+
+
+trait StreamingDataConnector extends DataConnector {
+
+  type K
+  type V
+
+  protected def stream(): Try[InputDStream[(K, V)]]
+
+  def transform(rdd: RDD[(K, V)]): Option[DataFrame]
+
+  def data(ms: Long): Option[DataFrame] = None
+
+  var dataSourceCacheOpt: Option[DataSourceCache] = None
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/source/DataCacheable.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/source/DataCacheable.scala b/measure/src/main/scala/org/apache/griffin/measure/data/source/DataCacheable.scala
new file mode 100644
index 0000000..3c9106a
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/source/DataCacheable.scala
@@ -0,0 +1,76 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.source
+
+import java.util.concurrent.atomic.AtomicLong
+
+import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
+
+trait DataCacheable {
+
+  val cacheInfoPath: String
+  val readyTimeInterval: Long
+  val readyTimeDelay: Long
+
+  def selfCacheInfoPath = s"${TimeInfoCache.infoPath}/${cacheInfoPath}"
+
+  def selfCacheTime = TimeInfoCache.cacheTime(selfCacheInfoPath)
+  def selfLastProcTime = TimeInfoCache.lastProcTime(selfCacheInfoPath)
+  def selfReadyTime = TimeInfoCache.readyTime(selfCacheInfoPath)
+  def selfCleanTime = TimeInfoCache.cleanTime(selfCacheInfoPath)
+
+  protected def submitCacheTime(ms: Long): Unit = {
+    val map = Map[String, String]((selfCacheTime -> ms.toString))
+    InfoCacheInstance.cacheInfo(map)
+  }
+
+  protected def submitReadyTime(ms: Long): Unit = {
+    val curReadyTime = ms - readyTimeDelay
+    if (curReadyTime % readyTimeInterval == 0) {
+      val map = Map[String, String]((selfReadyTime -> curReadyTime.toString))
+      InfoCacheInstance.cacheInfo(map)
+    }
+  }
+
+  protected def submitLastProcTime(ms: Long): Unit = {
+    val map = Map[String, String]((selfLastProcTime -> ms.toString))
+    InfoCacheInstance.cacheInfo(map)
+  }
+
+  protected def submitCleanTime(ms: Long): Unit = {
+    val cleanTime = genCleanTime(ms)
+    val map = Map[String, String]((selfCleanTime -> cleanTime.toString))
+    InfoCacheInstance.cacheInfo(map)
+  }
+
+  protected def genCleanTime(ms: Long): Long = ms
+
+  protected def readCleanTime(): Option[Long] = {
+    val key = selfCleanTime
+    val keys = key :: Nil
+    InfoCacheInstance.readInfo(keys).get(key).flatMap { v =>
+      try {
+        Some(v.toLong)
+      } catch {
+        case _: Throwable => None
+      }
+    }
+  }
+
+}
\ No newline at end of file
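
One detail worth noting: submitReadyTime only publishes a ready time when the delayed timestamp lands exactly on an interval boundary, so most batches submit a cache time but no ready time. A small sketch of that alignment rule with illustrative values (the real interval and delay are parsed from "ready.time.interval" and "ready.time.delay"):

  val readyTimeInterval = 60000L   // e.g. "1m"
  val readyTimeDelay    = 60000L   // e.g. "1m"

  def wouldSubmitReadyTime(ms: Long): Boolean =
    (ms - readyTimeDelay) % readyTimeInterval == 0

  wouldSubmitReadyTime(120000L)   // true : 120000 - 60000 = 60000, a multiple of the interval
  wouldSubmitReadyTime(130000L)   // false: 70000 is not, so no ready time is cached for this batch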

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSource.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSource.scala b/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSource.scala
new file mode 100644
index 0000000..0927754
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSource.scala
@@ -0,0 +1,109 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.source
+
+import org.apache.griffin.measure.data.connector._
+import org.apache.griffin.measure.data.connector.batch._
+import org.apache.griffin.measure.data.connector.streaming._
+import org.apache.griffin.measure.log.Loggable
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.{DataFrame, Row, SQLContext}
+
+case class DataSource(sqlContext: SQLContext,
+                      name: String,
+                      dataConnectors: Seq[DataConnector],
+                      dataSourceCacheOpt: Option[DataSourceCache]
+                     ) extends Loggable with Serializable {
+
+  val batchDataConnectors = DataConnectorFactory.filterBatchDataConnectors(dataConnectors)
+  val streamingDataConnectors = DataConnectorFactory.filterStreamingDataConnectors(dataConnectors)
+  streamingDataConnectors.foreach(_.dataSourceCacheOpt = dataSourceCacheOpt)
+
+  def init(): Unit = {
+    dataSourceCacheOpt.foreach(_.init)
+    dataConnectors.foreach(_.init)
+  }
+
+  def loadData(ms: Long): Unit = {
+    data(ms) match {
+      case Some(df) => {
+        df.registerTempTable(name)
+      }
+      case None => {
+//        val df = sqlContext.emptyDataFrame
+//        df.registerTempTable(name)
+        warn(s"load data source [${name}] fails")
+//        throw new Exception(s"load data source [${name}] fails")
+      }
+    }
+  }
+
+  def dropTable(): Unit = {
+    try {
+      sqlContext.dropTempTable(name)
+    } catch {
+      case e: Throwable => warn(s"drop table [${name}] fails: ${e.getMessage}")
+    }
+  }
+
+  private def data(ms: Long): Option[DataFrame] = {
+    val batchDataFrameOpt = batchDataConnectors.flatMap { dc =>
+      dc.data(ms)
+    }.reduceOption((a, b) => unionDataFrames(a, b))
+
+    val cacheDataFrameOpt = dataSourceCacheOpt.flatMap(_.readData())
+
+    (batchDataFrameOpt, cacheDataFrameOpt) match {
+      case (Some(bdf), Some(cdf)) => Some(unionDataFrames(bdf, cdf))
+      case (Some(bdf), _) => Some(bdf)
+      case (_, Some(cdf)) => Some(cdf)
+      case _ => None
+    }
+  }
+
+  private def unionDataFrames(df1: DataFrame, df2: DataFrame): DataFrame = {
+    try {
+      val cols = df1.columns
+      val rdd2 = df2.map{ row =>
+        val values = cols.map { col =>
+          row.getAs[Any](col)
+        }
+        Row(values: _*)
+      }
+      val ndf2 = sqlContext.createDataFrame(rdd2, df1.schema)
+      df1 unionAll ndf2
+//      df1 unionAll df2
+    } catch {
+      case e: Throwable => df1
+    }
+  }
+
+  def updateData(df: DataFrame, ms: Long): Unit = {
+    dataSourceCacheOpt.foreach(_.updateData(df, ms))
+  }
+
+  def updateDataMap(dfMap: Map[Long, DataFrame]): Unit = {
+    dataSourceCacheOpt.foreach(_.updateDataMap(dfMap))
+  }
+
+  def cleanOldData(): Unit = {
+    dataSourceCacheOpt.foreach(_.cleanOldData)
+  }
+
+}
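
The unionDataFrames helper above exists because unionAll in Spark 1.x matches columns by position, so the second frame is first rebuilt in the first frame's column order before the two are merged. A simplified, hedged equivalent of that realignment (using select instead of rebuilding an RDD[Row], and assuming a live SQLContext named sqlContext):

  import org.apache.spark.sql.functions.col

  val df1 = sqlContext.createDataFrame(Seq(("emily", 25), ("alex", 35))).toDF("name", "age")
  val df2 = sqlContext.createDataFrame(Seq((40, "bob"))).toDF("age", "name")   // same columns, different order

  val aligned = df2.select(df1.columns.map(col): _*)   // reorder df2's columns to df1's layout
  val unioned = df1.unionAll(aligned)                   // positional union is now safe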

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSourceCache.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSourceCache.scala b/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSourceCache.scala
new file mode 100644
index 0000000..769550f
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSourceCache.scala
@@ -0,0 +1,347 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.source
+
+import java.util.concurrent.TimeUnit
+
+import org.apache.griffin.measure.cache.info.{InfoCacheInstance, TimeInfoCache}
+import org.apache.griffin.measure.data.connector.streaming.StreamingDataConnector
+import org.apache.griffin.measure.data.connector._
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.utils.{HdfsFileDumpUtil, HdfsUtil, TimeUtil}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.{DataFrame, SQLContext}
+
+import scala.util.{Failure, Success}
+import org.apache.griffin.measure.utils.ParamUtil._
+
+case class DataSourceCache(sqlContext: SQLContext, param: Map[String, Any],
+                           metricName: String, index: Int
+                          ) extends DataCacheable with Loggable with Serializable {
+
+  val name = ""
+
+  val _FilePath = "file.path"
+  val _InfoPath = "info.path"
+  val _ReadyTimeInterval = "ready.time.interval"
+  val _ReadyTimeDelay = "ready.time.delay"
+  val _TimeRange = "time.range"
+
+  val defFilePath = s"hdfs:///griffin/cache/${metricName}/${index}"
+  val defInfoPath = s"${index}"
+
+  val filePath: String = param.getString(_FilePath, defFilePath)
+  val cacheInfoPath: String = param.getString(_InfoPath, defInfoPath)
+  val readyTimeInterval: Long = TimeUtil.milliseconds(param.getString(_ReadyTimeInterval, "1m")).getOrElse(60000L)
+  val readyTimeDelay: Long = TimeUtil.milliseconds(param.getString(_ReadyTimeDelay, "1m")).getOrElse(60000L)
+  val deltaTimeRange: (Long, Long) = {
+    def negative(n: Long): Long = if (n <= 0) n else 0
+    param.get(_TimeRange) match {
+      case Some(seq: Seq[String]) => {
+        val nseq = seq.flatMap(TimeUtil.milliseconds(_))
+        val ns = negative(nseq.headOption.getOrElse(0))
+        val ne = negative(nseq.tail.headOption.getOrElse(0))
+        (ns, ne)
+      }
+      case _ => (0, 0)
+    }
+  }
+
+  val rowSepLiteral = "\n"
+  val partitionUnits: List[String] = List("hour", "min", "sec")
+
+  val newCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.new")
+  val oldCacheLock = InfoCacheInstance.genLock(s"${cacheInfoPath}.old")
+
+  def init(): Unit = {
+    // do nothing
+  }
+
+  def saveData(dfOpt: Option[DataFrame], ms: Long): Unit = {
+    dfOpt match {
+      case Some(df) => {
+        val newCacheLocked = newCacheLock.lock(-1, TimeUnit.SECONDS)
+        if (newCacheLocked) {
+          try {
+            val ptns = getPartition(ms)
+            val ptnsPath = genPartitionHdfsPath(ptns)
+            val dirPath = s"${filePath}/${ptnsPath}"
+            val dataFileName = s"${ms}"
+            val dataFilePath = HdfsUtil.getHdfsFilePath(dirPath, dataFileName)
+
+            // transform data
+            val dataRdd: RDD[String] = df.toJSON
+
+            // save data
+            val dumped = if (!dataRdd.isEmpty) {
+              HdfsFileDumpUtil.dump(dataFilePath, dataRdd, rowSepLiteral)
+            } else false
+
+          } catch {
+            case e: Throwable => error(s"save data error: ${e.getMessage}")
+          } finally {
+            newCacheLock.unlock()
+          }
+        }
+      }
+      case _ => {
+        info(s"no data frame to save")
+      }
+    }
+
+    // submit cache time and ready time
+    submitCacheTime(ms)
+    submitReadyTime(ms)
+  }
+
+  def readData(): Option[DataFrame] = {
+    val timeRange = TimeInfoCache.getTimeRange
+    submitLastProcTime(timeRange._2)
+
+    val reviseTimeRange = (timeRange._1 + deltaTimeRange._1, timeRange._2 + deltaTimeRange._2)
+    submitCleanTime(reviseTimeRange._1)
+
+    // read directly through partition info
+    val partitionRanges = getPartitionRange(reviseTimeRange._1, reviseTimeRange._2)
+    println(s"read time ranges: ${reviseTimeRange}")
+    println(s"read partition ranges: ${partitionRanges}")
+
+    // list partition paths
+    val partitionPaths = listPathsBetweenRanges(filePath :: Nil, partitionRanges)
+
+    if (partitionPaths.isEmpty) {
+      None
+    } else {
+      try {
+        Some(sqlContext.read.json(partitionPaths: _*))
+      } catch {
+        case e: Throwable => {
+          warn(s"read data source cache warn: ${e.getMessage}")
+          None
+        }
+      }
+    }
+  }
+
+  // -- deprecated --
+  def updateData(df: DataFrame, ms: Long): Unit = {
+    val ptns = getPartition(ms)
+    val ptnsPath = genPartitionHdfsPath(ptns)
+    val dirPath = s"${filePath}/${ptnsPath}"
+    val dataFileName = s"${ms}"
+    val dataFilePath = HdfsUtil.getHdfsFilePath(dirPath, dataFileName)
+
+    try {
+      val records = df.toJSON
+      val arr = records.collect
+      val needSave = !arr.isEmpty
+
+      // remove out time old data
+      HdfsFileDumpUtil.remove(dirPath, dataFileName, true)
+      println(s"remove file path: ${dirPath}/${dataFileName}")
+
+      // save updated data
+      val dumped = if (needSave) {
+        println(s"update file path: ${dataFilePath}")
+        HdfsFileDumpUtil.dump(dataFilePath, arr, rowSepLiteral)
+      } else false
+    } catch {
+      case e: Throwable => error(s"update data error: ${e.getMessage}")
+    }
+  }
+
+  def updateData(rdd: RDD[String], ms: Long, cnt: Long): Unit = {
+    val ptns = getPartition(ms)
+    val ptnsPath = genPartitionHdfsPath(ptns)
+    val dirPath = s"${filePath}/${ptnsPath}"
+    val dataFileName = s"${ms}"
+    val dataFilePath = HdfsUtil.getHdfsFilePath(dirPath, dataFileName)
+
+    try {
+//      val needSave = !rdd.isEmpty
+
+      // remove out time old data
+      HdfsFileDumpUtil.remove(dirPath, dataFileName, true)
+      println(s"remove file path: ${dirPath}/${dataFileName}")
+
+      // save updated data
+      val dumped = if (cnt > 0) {
+        println(s"update file path: ${dataFilePath}")
+        HdfsFileDumpUtil.dump(dataFilePath, rdd, rowSepLiteral)
+      } else false
+    } catch {
+      case e: Throwable => error(s"update data error: ${e.getMessage}")
+    } finally {
+      rdd.unpersist()
+    }
+  }
+
+  def updateData(rdd: Iterable[String], ms: Long): Unit = {
+    val ptns = getPartition(ms)
+    val ptnsPath = genPartitionHdfsPath(ptns)
+    val dirPath = s"${filePath}/${ptnsPath}"
+    val dataFileName = s"${ms}"
+    val dataFilePath = HdfsUtil.getHdfsFilePath(dirPath, dataFileName)
+
+    try {
+      val needSave = !rdd.isEmpty
+
+      // remove out time old data
+      HdfsFileDumpUtil.remove(dirPath, dataFileName, true)
+      println(s"remove file path: ${dirPath}/${dataFileName}")
+
+      // save updated data
+      val dumped = if (needSave) {
+        println(s"update file path: ${dataFilePath}")
+        HdfsFileDumpUtil.dump(dataFilePath, rdd, rowSepLiteral)
+      } else false
+    } catch {
+      case e: Throwable => error(s"update data error: ${e.getMessage}")
+    }
+  }
+
+  def updateDataMap(dfMap: Map[Long, DataFrame]): Unit = {
+    val dataMap = dfMap.map { pair =>
+      val (t, recs) = pair
+      val rdd = recs.toJSON
+//      rdd.cache
+      (t, rdd, rdd.count)
+    }
+
+    dataMap.foreach { pair =>
+      val (t, arr, cnt) = pair
+      updateData(arr, t, cnt)
+    }
+  }
+
+  def cleanOldData(): Unit = {
+    val oldCacheLocked = oldCacheLock.lock(-1, TimeUnit.SECONDS)
+    if (oldCacheLocked) {
+      try {
+        val cleanTime = readCleanTime()
+        cleanTime match {
+          case Some(ct) => {
+            // drop partitions
+            val bounds = getPartition(ct)
+
+            // list partition paths
+            val earlierPaths = listPathsEarlierThanBounds(filePath :: Nil, bounds)
+
+            // delete out time data path
+            earlierPaths.foreach { path =>
+              println(s"delete hdfs path: ${path}")
+              HdfsUtil.deleteHdfsPath(path)
+            }
+          }
+          case _ => {
+            // do nothing
+          }
+        }
+      } catch {
+        case e: Throwable => error(s"clean old data error: ${e.getMessage}")
+      } finally {
+        oldCacheLock.unlock()
+      }
+    }
+  }
+
+  override protected def genCleanTime(ms: Long): Long = {
+    val minPartitionUnit = partitionUnits.last
+    val t1 = TimeUtil.timeToUnit(ms, minPartitionUnit)
+    val t2 = TimeUtil.timeFromUnit(t1, minPartitionUnit)
+    t2
+  }
+
+  private def getPartition(ms: Long): List[Long] = {
+    partitionUnits.map { unit =>
+      TimeUtil.timeToUnit(ms, unit)
+    }
+  }
+  private def getPartitionRange(ms1: Long, ms2: Long): List[(Long, Long)] = {
+    partitionUnits.map { unit =>
+      val t1 = TimeUtil.timeToUnit(ms1, unit)
+      val t2 = TimeUtil.timeToUnit(ms2, unit)
+      (t1, t2)
+    }
+  }
+  private def genPartitionHdfsPath(partition: List[Long]): String = {
+    partition.map(prtn => s"${prtn}").mkString("/")
+  }
+  private def str2Long(str: String): Option[Long] = {
+    try {
+      Some(str.toLong)
+    } catch {
+      case e: Throwable => None
+    }
+  }
+
+
+  // here the range means [min, max], but the best range should be (min, max]
+  private def listPathsBetweenRanges(paths: List[String],
+                                     partitionRanges: List[(Long, Long)]
+                                    ): List[String] = {
+    partitionRanges match {
+      case Nil => paths
+      case head :: tail => {
+        val (lb, ub) = head
+        val curPaths = paths.flatMap { path =>
+          val names = HdfsUtil.listSubPathsByType(path, "dir").toList
+          names.filter { name =>
+            str2Long(name) match {
+              case Some(t) => (t >= lb) && (t <= ub)
+              case _ => false
+            }
+          }.map(HdfsUtil.getHdfsFilePath(path, _))
+        }
+        listPathsBetweenRanges(curPaths, tail)
+      }
+    }
+  }
+  private def listPathsEarlierThanBounds(paths: List[String], bounds: List[Long]
+                                        ): List[String] = {
+    bounds match {
+      case Nil => paths
+      case head :: tail => {
+        val earlierPaths = paths.flatMap { path =>
+          val names = HdfsUtil.listSubPathsByType(path, "dir").toList
+          names.filter { name =>
+            str2Long(name) match {
+              case Some(t) => (t < head)
+              case _ => false
+            }
+          }.map(HdfsUtil.getHdfsFilePath(path, _))
+        }
+        val equalPaths = paths.flatMap { path =>
+          val names = HdfsUtil.listSubPathsByType(path, "dir").toList
+          names.filter { name =>
+            str2Long(name) match {
+              case Some(t) => (t == head)
+              case _ => false
+            }
+          }.map(HdfsUtil.getHdfsFilePath(path, _))
+        }
+
+        tail match {
+          case Nil => earlierPaths
+          case _ => earlierPaths ::: listPathsEarlierThanBounds(equalPaths, tail)
+        }
+      }
+    }
+  }
+}
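
To make the cache layout concrete: with partitionUnits of hour/min/sec, each saved micro-batch ends up in a nested directory per time unit, and readData/cleanOldData later walk those directory names as numbers. A hedged sketch of the resulting paths, assuming TimeUtil.timeToUnit(ms, unit) truncates an epoch-millisecond timestamp to whole units since the epoch (the exact conversion lives in TimeUtil):

  val ms   = 1500000000000L
  val hour = ms / (60 * 60 * 1000)   // 416666
  val min  = ms / (60 * 1000)        // 25000000
  val sec  = ms / 1000               // 1500000000

  val filePath = "hdfs:///griffin/cache/my_metric/0"    // default file.path pattern
  val dataDir  = s"${filePath}/${hour}/${min}/${sec}"   // genPartitionHdfsPath(getPartition(ms))
  val dataFile = s"${dataDir}/${ms}"                    // one dump file per batch timestamp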

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSourceFactory.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSourceFactory.scala b/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSourceFactory.scala
new file mode 100644
index 0000000..6c1b76e
--- /dev/null
+++ b/measure/src/main/scala/org/apache/griffin/measure/data/source/DataSourceFactory.scala
@@ -0,0 +1,80 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.measure.data.source
+
+import org.apache.griffin.measure.config.params.user._
+import org.apache.griffin.measure.data.connector.batch.BatchDataConnector
+import org.apache.griffin.measure.data.connector.streaming.StreamingDataConnector
+import org.apache.griffin.measure.data.connector.{DataConnector, DataConnectorFactory}
+import org.apache.griffin.measure.log.Loggable
+import org.apache.griffin.measure.process.engine.{DqEngine, DqEngines}
+import org.apache.spark.sql.SQLContext
+import org.apache.spark.streaming.StreamingContext
+
+import scala.util.{Success, Try}
+
+object DataSourceFactory extends Loggable {
+
+  val HiveRegex = """^(?i)hive$""".r
+  val TextRegex = """^(?i)text$""".r
+  val AvroRegex = """^(?i)avro$""".r
+
+  def genDataSources(sqlContext: SQLContext, ssc: StreamingContext, dqEngines: DqEngines,
+                     dataSourceParams: Seq[DataSourceParam], metricName: String): Seq[DataSource] = {
+    dataSourceParams.zipWithIndex.flatMap { pair =>
+      val (param, index) = pair
+      genDataSource(sqlContext, ssc, dqEngines, param, metricName, index)
+    }
+  }
+
+  private def genDataSource(sqlContext: SQLContext, ssc: StreamingContext,
+                            dqEngines: DqEngines,
+                            dataSourceParam: DataSourceParam,
+                            metricName: String, index: Int
+                           ): Option[DataSource] = {
+    val name = dataSourceParam.name
+    val connectorParams = dataSourceParam.connectors
+    val cacheParam = dataSourceParam.cache
+    val dataConnectors = connectorParams.flatMap { connectorParam =>
+      DataConnectorFactory.getDataConnector(sqlContext, ssc, dqEngines, connectorParam) match {
+        case Success(connector) => Some(connector)
+        case _ => None
+      }
+    }
+    val dataSourceCacheOpt = genDataSourceCache(sqlContext, cacheParam, metricName, index)
+
+    Some(DataSource(sqlContext, name, dataConnectors, dataSourceCacheOpt))
+  }
+
+  private def genDataSourceCache(sqlContext: SQLContext, param: Map[String, Any],
+                                 metricName: String, index: Int
+                                ) = {
+    if (param != null) {
+      try {
+        Some(DataSourceCache(sqlContext, param, metricName, index))
+      } catch {
+        case e: Throwable => {
+          error(s"generate data source cache fails: ${e.getMessage}")
+          None
+        }
+      }
+    } else None
+  }
+
+}
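
For reference, the cache block handed to genDataSourceCache is a plain map; the keys below are the ones DataSourceCache reads, while the values are only illustrative (durations are parsed by TimeUtil.milliseconds, and a negative time.range start widens the read window backwards):

  val cacheParam: Map[String, Any] = Map(
    "file.path"           -> "hdfs:///griffin/streaming/dump/source",   // where batch dumps land
    "info.path"           -> "source",                                  // suffix under TimeInfoCache.infoPath
    "ready.time.interval" -> "10s",
    "ready.time.delay"    -> "30s",
    "time.range"          -> Seq("-2m", "0")   // read from 2 minutes before the cached lower bound
  )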

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/4aa6f779/measure/src/main/scala/org/apache/griffin/measure/persist/HdfsPersist.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/persist/HdfsPersist.scala b/measure/src/main/scala/org/apache/griffin/measure/persist/HdfsPersist.scala
index 97786c4..431fe10 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/persist/HdfsPersist.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/persist/HdfsPersist.scala
@@ -21,10 +21,12 @@ package org.apache.griffin.measure.persist
 import java.util.Date
 
 import org.apache.griffin.measure.result._
-import org.apache.griffin.measure.utils.HdfsUtil
+import org.apache.griffin.measure.utils.{HdfsUtil, JsonUtil}
 import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
 
 import scala.util.Try
+import org.apache.griffin.measure.utils.ParamUtil._
 
 // persist result and data to hdfs
 case class HdfsPersist(config: Map[String, Any], metricName: String, timeStamp: Long) extends Persist {
@@ -34,17 +36,17 @@ case class HdfsPersist(config: Map[String, Any], metricName: String, timeStamp:
   val MaxLinesPerFile = "max.lines.per.file"
 
   val path = config.getOrElse(Path, "").toString
-  val maxPersistLines = try { config.getOrElse(MaxPersistLines, -1).toString.toInt } catch { case _ => -1 }
-  val maxLinesPerFile = try { config.getOrElse(MaxLinesPerFile, 10000).toString.toLong } catch { case _ => 10000 }
+  val maxPersistLines = config.getInt(MaxPersistLines, -1)
+  val maxLinesPerFile = config.getLong(MaxLinesPerFile, 10000)
 
   val separator = "/"
 
   val StartFile = filePath("_START")
   val FinishFile = filePath("_FINISH")
-  val ResultFile = filePath("_RESULT")
+  val MetricsFile = filePath("_METRICS")
 
-  val MissRecFile = filePath("_MISSREC")      // optional
-  val MatchRecFile = filePath("_MATCHREC")    // optional
+//  val MissRecFile = filePath("_MISSREC")      // optional
+//  val MatchRecFile = filePath("_MATCHREC")    // optional
 
   val LogFile = filePath("_LOG")
 
@@ -56,7 +58,7 @@ case class HdfsPersist(config: Map[String, Any], metricName: String, timeStamp:
   }
 
   def available(): Boolean = {
-    (path.nonEmpty) && (maxPersistLines < Int.MaxValue)
+    path.nonEmpty
   }
 
   private def persistHead: String = {
@@ -92,57 +94,141 @@ case class HdfsPersist(config: Map[String, Any], metricName: String, timeStamp:
     }
   }
 
-  def result(rt: Long, result: Result): Unit = {
-    try {
-      val resStr = result match {
-        case ar: AccuracyResult => {
-          s"match percentage: ${ar.matchPercentage}\ntotal count: ${ar.getTotal}\nmiss count: ${ar.getMiss}, match count: ${ar.getMatch}"
-        }
-        case pr: ProfileResult => {
-          s"match percentage: ${pr.matchPercentage}\ntotal count: ${pr.getTotal}\nmiss count: ${pr.getMiss}, match count: ${pr.getMatch}"
-        }
-        case _ => {
-          s"result: ${result}"
-        }
-      }
-      HdfsUtil.writeContent(ResultFile, timeHead(rt) + resStr)
-      log(rt, resStr)
+//  def result(rt: Long, result: Result): Unit = {
+//    try {
+//      val resStr = result match {
+//        case ar: AccuracyResult => {
+//          s"match percentage: ${ar.matchPercentage}\ntotal count: ${ar.getTotal}\nmiss count: ${ar.getMiss}, match count: ${ar.getMatch}"
+//        }
+//        case pr: ProfileResult => {
+//          s"match percentage: ${pr.matchPercentage}\ntotal count: ${pr.getTotal}\nmiss count: ${pr.getMiss}, match count: ${pr.getMatch}"
+//        }
+//        case _ => {
+//          s"result: ${result}"
+//        }
+//      }
+//      HdfsUtil.writeContent(ResultFile, timeHead(rt) + resStr)
+//      log(rt, resStr)
+//
+//      info(resStr)
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//  }
 
-      info(resStr)
-    } catch {
-      case e: Throwable => error(e.getMessage)
-    }
+  // need to avoid string too long
+//  private def rddRecords(records: RDD[String], path: String): Unit = {
+//    try {
+//      val recordCount = records.count
+//      val count = if (maxPersistLines < 0) recordCount else scala.math.min(maxPersistLines, recordCount)
+//      if (count > 0) {
+//        val groupCount = ((count - 1) / maxLinesPerFile + 1).toInt
+//        if (groupCount <= 1) {
+//          val recs = records.take(count.toInt)
+//          persistRecords(path, recs)
+//        } else {
+//          val groupedRecords: RDD[(Long, Iterable[String])] =
+//            records.zipWithIndex.flatMap { r =>
+//              val gid = r._2 / maxLinesPerFile
+//              if (gid < groupCount) Some((gid, r._1)) else None
+//            }.groupByKey()
+//          groupedRecords.foreach { group =>
+//            val (gid, recs) = group
+//            val hdfsPath = if (gid == 0) path else withSuffix(path, gid.toString)
+//            persistRecords(hdfsPath, recs)
+//          }
+//        }
+//      }
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//  }
+//
+//  private def iterableRecords(records: Iterable[String], path: String): Unit = {
+//    try {
+//      val recordCount = records.size
+//      val count = if (maxPersistLines < 0) recordCount else scala.math.min(maxPersistLines, recordCount)
+//      if (count > 0) {
+//        val groupCount = ((count - 1) / maxLinesPerFile + 1).toInt
+//        if (groupCount <= 1) {
+//          val recs = records.take(count.toInt)
+//          persistRecords(path, recs)
+//        } else {
+//          val groupedRecords = records.grouped(groupCount).zipWithIndex
+//          groupedRecords.take(groupCount).foreach { group =>
+//            val (recs, gid) = group
+//            val hdfsPath = if (gid == 0) path else withSuffix(path, gid.toString)
+//            persistRecords(hdfsPath, recs)
+//          }
+//        }
+//      }
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//  }
+//
+//  def records(recs: RDD[String], tp: String): Unit = {
+//    tp match {
+//      case PersistDataType.MISS => rddRecords(recs, MissRecFile)
+//      case PersistDataType.MATCH => rddRecords(recs, MatchRecFile)
+//      case _ => {}
+//    }
+//  }
+//
+//  def records(recs: Iterable[String], tp: String): Unit = {
+//    tp match {
+//      case PersistDataType.MISS => iterableRecords(recs, MissRecFile)
+//      case PersistDataType.MATCH => iterableRecords(recs, MatchRecFile)
+//      case _ => {}
+//    }
+//  }
+
+  private def persistRecords(hdfsPath: String, records: Iterable[String]): Unit = {
+    val recStr = records.mkString("\n")
+    HdfsUtil.writeContent(hdfsPath, recStr)
   }
 
-  // need to avoid string too long
-  private def rddRecords(records: RDD[String], path: String): Unit = {
+  def log(rt: Long, msg: String): Unit = {
     try {
-      val recordCount = records.count
-      val count = if (maxPersistLines < 0) recordCount else scala.math.min(maxPersistLines, recordCount)
-      if (count > 0) {
-        val groupCount = ((count - 1) / maxLinesPerFile + 1).toInt
-        if (groupCount <= 1) {
-          val recs = records.take(count.toInt)
-          persistRecords(path, recs)
-        } else {
-          val groupedRecords: RDD[(Long, Iterable[String])] =
-            records.zipWithIndex.flatMap { r =>
-              val gid = r._2 / maxLinesPerFile
-              if (gid < groupCount) Some((gid, r._1)) else None
-            }.groupByKey()
-          groupedRecords.foreach { group =>
-            val (gid, recs) = group
-            val hdfsPath = if (gid == 0) path else withSuffix(path, gid.toString)
-            persistRecords(hdfsPath, recs)
-          }
-        }
-      }
+      val logStr = (if (isInit) persistHead else "") + timeHead(rt) + s"${msg}\n\n"
+      HdfsUtil.appendContent(LogFile, logStr)
     } catch {
       case e: Throwable => error(e.getMessage)
     }
   }
 
-  private def iterableRecords(records: Iterable[String], path: String): Unit = {
+
+//  def persistRecords(df: DataFrame, name: String): Unit = {
+//    val records = df.toJSON
+//    val path = filePath(name)
+//    try {
+//      val recordCount = records.count
+//      val count = if (maxPersistLines < 0) recordCount else scala.math.min(maxPersistLines, recordCount)
+//      if (count > 0) {
+//        val groupCount = ((count - 1) / maxLinesPerFile + 1).toInt
+//        if (groupCount <= 1) {
+//          val recs = records.take(count.toInt)
+//          persistRecords(path, recs)
+//        } else {
+//          val groupedRecords: RDD[(Long, Iterable[String])] =
+//            records.zipWithIndex.flatMap { r =>
+//              val gid = r._2 / maxLinesPerFile
+//              if (gid < groupCount) Some((gid, r._1)) else None
+//            }.groupByKey()
+//          groupedRecords.foreach { group =>
+//            val (gid, recs) = group
+//            val hdfsPath = if (gid == 0) path else withSuffix(path, gid.toString)
+//            persistRecords(hdfsPath, recs)
+//          }
+//        }
+//      }
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
+//  }
+
+  def persistRecords(records: Iterable[String], name: String): Unit = {
+    val path = filePath(name)
     try {
       val recordCount = records.size
       val count = if (maxPersistLines < 0) recordCount else scala.math.min(maxPersistLines, recordCount)
@@ -165,39 +251,35 @@ case class HdfsPersist(config: Map[String, Any], metricName: String, timeStamp:
     }
   }
 
-  def records(recs: RDD[String], tp: String): Unit = {
-    tp match {
-      case PersistType.MISS => rddRecords(recs, MissRecFile)
-      case PersistType.MATCH => rddRecords(recs, MatchRecFile)
-      case _ => {}
-    }
-  }
-
-  def records(recs: Iterable[String], tp: String): Unit = {
-    tp match {
-      case PersistType.MISS => iterableRecords(recs, MissRecFile)
-      case PersistType.MATCH => iterableRecords(recs, MatchRecFile)
-      case _ => {}
-    }
-  }
-
-//  def missRecords(records: RDD[String]): Unit = {
-//    rddRecords(records, MissRecFile)
+//  def persistMetrics(metrics: Seq[String], name: String): Unit = {
+//    val path = filePath(name)
+//    try {
+//      val recordCount = metrics.size
+//      val count = if (maxPersistLines < 0) recordCount else scala.math.min(maxPersistLines, recordCount)
+//      if (count > 0) {
+//        val groupCount = ((count - 1) / maxLinesPerFile + 1).toInt
+//        if (groupCount <= 1) {
+//          val recs = metrics.take(count.toInt)
+//          persistRecords(path, recs)
+//        } else {
+//          val groupedRecords = metrics.grouped(groupCount).zipWithIndex
+//          groupedRecords.take(groupCount).foreach { group =>
+//            val (recs, gid) = group
+//            val hdfsPath = if (gid == 0) path else withSuffix(path, gid.toString)
+//            persistRecords(hdfsPath, recs)
+//          }
+//        }
+//      }
+//    } catch {
+//      case e: Throwable => error(e.getMessage)
+//    }
 //  }
-//
-//  def matchRecords(records: RDD[String]): Unit = {
-//    rddRecords(records, MatchRecFile)
-//  }
-
-  private def persistRecords(hdfsPath: String, records: Iterable[String]): Unit = {
-    val recStr = records.mkString("\n")
-    HdfsUtil.writeContent(hdfsPath, recStr)
-  }
 
-  def log(rt: Long, msg: String): Unit = {
+  def persistMetrics(metrics: Map[String, Any]): Unit = {
+    val json = JsonUtil.toJson(metrics)
     try {
-      val logStr = (if (isInit) persistHead else "") + timeHead(rt) + s"${msg}\n\n"
-      HdfsUtil.appendContent(LogFile, logStr)
+      info(s"${json}")
+      persistRecords(MetricsFile, json :: Nil)
     } catch {
       case e: Throwable => error(e.getMessage)
     }