You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ra...@apache.org on 2018/05/22 16:45:12 UTC
[30/50] [abbrv] carbondata git commit: [CARBONDATA-2354] fixed streaming example
[CARBONDATA-2354] fixed streaming example
This closes #2182
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/b8d5abf2
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/b8d5abf2
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/b8d5abf2
Branch: refs/heads/branch-1.4
Commit: b8d5abf2efed1e8cc07c62aa180161cec73120a2
Parents: 784b22d
Author: vandana <va...@gmail.com>
Authored: Wed Apr 18 16:57:17 2018 +0530
Committer: Jacky Li <ja...@qq.com>
Committed: Sat May 19 10:25:07 2018 +0800
----------------------------------------------------------------------
docs/streaming-guide.md | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/carbondata/blob/b8d5abf2/docs/streaming-guide.md
----------------------------------------------------------------------
diff --git a/docs/streaming-guide.md b/docs/streaming-guide.md
index 3ea2881..a9b174f 100644
--- a/docs/streaming-guide.md
+++ b/docs/streaming-guide.md
@@ -27,11 +27,11 @@ Start spark-shell in new terminal, type :paste, then copy and run the following
import org.apache.spark.sql.{CarbonEnv, SparkSession}
import org.apache.spark.sql.CarbonSession._
import org.apache.spark.sql.streaming.{ProcessingTime, StreamingQuery}
- import org.apache.carbondata.core.util.path.CarbonStorePath
-
+ import org.apache.carbondata.core.util.path.CarbonTablePath
+
val warehouse = new File("./warehouse").getCanonicalPath
val metastore = new File("./metastore").getCanonicalPath
-
+
val spark = SparkSession
.builder()
.master("local")
@@ -54,8 +54,8 @@ Start spark-shell in new terminal, type :paste, then copy and run the following
| TBLPROPERTIES('streaming'='true')""".stripMargin)
val carbonTable = CarbonEnv.getCarbonTable(Some("default"), "carbon_table")(spark)
- val tablePath = CarbonStorePath.getCarbonTablePath(carbonTable.getAbsoluteTableIdentifier)
-
+ val tablePath = carbonTable.getTablePath
+
// batch load
var qry: StreamingQuery = null
val readSocketDF = spark.readStream
@@ -68,7 +68,7 @@ Start spark-shell in new terminal, type :paste, then copy and run the following
qry = readSocketDF.writeStream
.format("carbondata")
.trigger(ProcessingTime("5 seconds"))
- .option("checkpointLocation", tablePath.getStreamingCheckpointDir)
+ .option("checkpointLocation", CarbonTablePath.getStreamingCheckpointDir(tablePath))
.option("dbName", "default")
.option("tableName", "carbon_table")
.start()