You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@sedona.apache.org by ji...@apache.org on 2021/03/18 20:04:45 UTC
[incubator-sedona] branch master updated: Add new Adapter examples
to RDD example projects and SQL Java Adapter test
This is an automated email from the ASF dual-hosted git repository.
jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-sedona.git
The following commit(s) were added to refs/heads/master by this push:
new 55e1ac2 Add new Adapter examples to RDD example projects and SQL Java Adapter test
55e1ac2 is described below
commit 55e1ac2872ed3199dddb578b2b972b185e2c6e57
Author: Jia Yu <ji...@apache.org>
AuthorDate: Thu Mar 18 13:04:35 2021 -0700
Add new Adapter examples to RDD example projects and SQL Java Adapter test
---
examples/rdd-colocation-mining/src/main/scala/ScalaExample.scala | 5 +++--
sql/src/test/java/org/apache/sedona/sql/adapterTestJava.java | 3 ++-
2 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/examples/rdd-colocation-mining/src/main/scala/ScalaExample.scala b/examples/rdd-colocation-mining/src/main/scala/ScalaExample.scala
index 05b2ac3..f8a7b1e 100644
--- a/examples/rdd-colocation-mining/src/main/scala/ScalaExample.scala
+++ b/examples/rdd-colocation-mining/src/main/scala/ScalaExample.scala
@@ -131,7 +131,7 @@ object ScalaExample extends App{
tripDf.show() // Optional
// Convert from DataFrame to RDD. This can also be done directly through Sedona RDD API.
tripDf.createOrReplaceTempView("tripdf")
- var tripRDD = Adapter.toSpatialRdd(sparkSession.sql("select ST_Point(cast(tripdf._c0 as Decimal(24, 14)), cast(tripdf._c1 as Decimal(24, 14))) as point from tripdf")
+ var tripRDD = Adapter.toSpatialRdd(sparkSession.sql("select ST_Point(cast(tripdf._c0 as Decimal(24, 14)), cast(tripdf._c1 as Decimal(24, 14))) as point, 'def' as trip_attr from tripdf")
, "point")
// Convert the Coordinate Reference System from degree-based to meter-based. This returns accurate distance calculations.
@@ -167,7 +167,8 @@ object ScalaExample extends App{
var adjacentMatrix = JoinQuery.DistanceJoinQueryFlat(tripRDD, bufferedArealmRDD,true,true)
// Uncomment the following two lines if you want to see what the join result looks like in SparkSQL
-// var adjacentMatrixDf = Adapter.toDf(adjacentMatrix, sparkSession)
+// import scala.collection.JavaConversions._
+// var adjacentMatrixDf = Adapter.toDf(adjacentMatrix, arealmRDD.fieldNames, tripRDD.fieldNames, sparkSession)
// adjacentMatrixDf.show()
var observedK = adjacentMatrix.count()*area*1.0/(arealmRDD.approximateTotalCount*tripRDD.approximateTotalCount)
diff --git a/sql/src/test/java/org/apache/sedona/sql/adapterTestJava.java b/sql/src/test/java/org/apache/sedona/sql/adapterTestJava.java
index 793cda5..d94b54f 100644
--- a/sql/src/test/java/org/apache/sedona/sql/adapterTestJava.java
+++ b/sql/src/test/java/org/apache/sedona/sql/adapterTestJava.java
@@ -39,6 +39,7 @@ import org.apache.spark.sql.SparkSession;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
+import scala.collection.JavaConverters;
import java.io.Serializable;
@@ -189,7 +190,7 @@ public class adapterTestJava
JavaPairRDD joinResultPairRDD = JoinQuery.SpatialJoinQueryFlat(pointRDD, polygonRDD, true, true);
- Dataset joinResultDf = Adapter.toDf(joinResultPairRDD, sparkSession);
+ Dataset joinResultDf = Adapter.toDf(joinResultPairRDD, JavaConverters.asScalaBuffer(pointRDD.fieldNames), JavaConverters.asScalaBuffer(polygonRDD.fieldNames), sparkSession);
joinResultDf.show(1);
}