Posted to commits@sedona.apache.org by ji...@apache.org on 2020/10/05 06:14:30 UTC

[incubator-sedona] branch jtsplus updated: Fix all issues; Sedona now works with JTSplus 1.16 and GeoTools 23.2

This is an automated email from the ASF dual-hosted git repository.

jiayu pushed a commit to branch jtsplus
in repository https://gitbox.apache.org/repos/asf/incubator-sedona.git


The following commit(s) were added to refs/heads/jtsplus by this push:
     new cd3b380  Fix all issues; Sedona now works with JTSplus 1.16 and GeoTools 23.2
cd3b380 is described below

commit cd3b38007f82448d2f7c42b79b9ee7880c2b1079
Author: Jia Yu <ji...@apache.org>
AuthorDate: Sun Oct 4 23:14:17 2020 -0700

    Fix all issues; Sedona now works with JTSplus 1.16 and GeoTools 23.2
---
 .gitmodules                                        |   1 +
 contrib/R                                          |   2 +-
 core/pom.xml                                       |  12 +--
 .../geospark/geometryObjects/Circle.java           |  16 +--
 .../spatialPartitioning/RtreePartitioning.java     |   2 +-
 jts                                                |   2 +-
 pom.xml                                            |   8 +-
 .../sql/geosparksql/expressions/Functions.scala    |   2 +-
 .../datasyslab/geosparksql/adapterTestJava.java    |  41 ++------
 .../datasyslab/geosparksql/adapterTestScala.scala  |  45 ++-------
 .../geosparksql/aggregateFunctionTestScala.scala   |   9 --
 .../geosparksql/constructorTestScala.scala         |  17 +---
 .../datasyslab/geosparksql/functionTestScala.scala |  39 ++------
 .../geosparksql/predicateJoinTestScala.scala       | 107 +--------------------
 .../geosparksql/predicateTestScala.scala           |  14 ---
 .../geosparkviz/sql/optVizOperatorTest.scala       |   4 +-
 .../geosparkviz/sql/standardVizOperatorTest.scala  |   8 +-
 17 files changed, 55 insertions(+), 274 deletions(-)

diff --git a/.gitmodules b/.gitmodules
index a00486b..3701fc1 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -4,3 +4,4 @@
 [submodule "jts"]
 	path = jts
 	url = https://github.com/jiayuasu/jts.git
+	branch = 1.16.x
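
Note: with "branch = 1.16.x" recorded in .gitmodules, running git submodule update --remote jts tracks the fork's 1.16.x line instead of its default branch.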
diff --git a/contrib/R b/contrib/R
index 4c96bc2..732c6ac 160000
--- a/contrib/R
+++ b/contrib/R
@@ -1 +1 @@
-Subproject commit 4c96bc2c698855ceda0339f80fbcebf88b4d3838
+Subproject commit 732c6ac0898d6018599b6ab4ca651e631e2f4e69
diff --git a/core/pom.xml b/core/pom.xml
index e37df91..13bc064 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -33,6 +33,12 @@
 
     <dependencies>
         <dependency>
+            <groupId>org.locationtech.jts</groupId>
+            <artifactId>jts-core</artifactId>
+            <version>1.16.2-SNAPSHOT</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
             <groupId>org.wololo</groupId>
             <artifactId>jts2geojson</artifactId>
             <version>0.13.0</version>
@@ -139,12 +145,6 @@
                 </exclusion>
             </exclusions>
         </dependency>
-        <dependency>
-            <groupId>org.locationtech.jts</groupId>
-            <artifactId>jts-core</artifactId>
-            <version>1.18.0-SNAPSHOT</version>
-            <scope>compile</scope>
-        </dependency>
     </dependencies>
     <build>
         <sourceDirectory>src/main/java</sourceDirectory>
diff --git a/core/src/main/java/org/datasyslab/geospark/geometryObjects/Circle.java b/core/src/main/java/org/datasyslab/geospark/geometryObjects/Circle.java
index 2b2720f..f5cf610 100644
--- a/core/src/main/java/org/datasyslab/geospark/geometryObjects/Circle.java
+++ b/core/src/main/java/org/datasyslab/geospark/geometryObjects/Circle.java
@@ -372,14 +372,6 @@ public class Circle
         return newCircle;
     }
 
-    @Override
-    protected Geometry reverseInternal()
-    {
-        Geometry g = this.centerGeometry.reverse();
-        Circle newCircle = new Circle(this.centerGeometry.reverse(), this.radius);
-        return new Circle(this.centerGeometry.reverse(), this.radius);
-    }
-
     public Geometry copy()
     {
         Circle cloneCircle = new Circle(this.centerGeometry.copy(), this.radius);
@@ -506,14 +498,10 @@ public class Circle
         return 0;
     }
 
-    /**
-     * TypeCode 0 - 7 have been reserved for other geometry types
-     * @return
-     */
     @Override
-    protected int getTypeCode()
+    protected int getSortIndex()
     {
-        return 8;
+        return 0;
     }
 
     @Override
diff --git a/core/src/main/java/org/datasyslab/geospark/spatialPartitioning/RtreePartitioning.java b/core/src/main/java/org/datasyslab/geospark/spatialPartitioning/RtreePartitioning.java
index c6879af..6e34308 100644
--- a/core/src/main/java/org/datasyslab/geospark/spatialPartitioning/RtreePartitioning.java
+++ b/core/src/main/java/org/datasyslab/geospark/spatialPartitioning/RtreePartitioning.java
@@ -52,7 +52,7 @@ public class RtreePartitioning
             strtree.insert(sample, sample);
         }
 
-        List<Envelope> envelopes = strtree.itemsTree().queryBoundary();
+        List<Envelope> envelopes = strtree.findLeafBounds();
         for (Envelope envelope : envelopes) {
             grids.add(envelope);
         }
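
Note: the old fork's strtree.itemsTree().queryBoundary() call is replaced by findLeafBounds(). Below is a minimal sketch of the new partition-grid construction, assuming (per the usage above) that the JTSplus 1.16 STRtree exposes findLeafBounds() returning the bounding Envelope of each leaf node; everything else is standard JTS, and the class/method names are illustrative only:

    import java.util.ArrayList;
    import java.util.List;
    import org.locationtech.jts.geom.Envelope;
    import org.locationtech.jts.index.strtree.STRtree;

    public class LeafBoundsSketch {
        // Build an STR-tree over the sampled envelopes and use its leaf bounds as partition grids.
        public static List<Envelope> buildGrids(List<Envelope> samples, int partitions) {
            STRtree strtree = new STRtree(Math.max(2, samples.size() / partitions));
            for (Envelope sample : samples) {
                strtree.insert(sample, sample);
            }
            // One grid cell per R-tree leaf (fork-specific API, as used in the diff above).
            return new ArrayList<>(strtree.findLeafBounds());
        }
    }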
diff --git a/jts b/jts
index 7ba7672..41df452 160000
--- a/jts
+++ b/jts
@@ -1 +1 @@
-Subproject commit 7ba76724e77900cead41cf4c384521d6374180d8
+Subproject commit 41df452efc4ce7c76499e35a0bf4526d771165f9
diff --git a/pom.xml b/pom.xml
index 89e634f..f34c442 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,7 +46,7 @@
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <scala.version>2.12.8</scala.version>
         <scala.compat.version>2.12</scala.compat.version>
-        <geotools.version>22.2</geotools.version>
+        <geotools.version>23.2</geotools.version>
         <spark.version>3.0.0</spark.version>
     </properties>
     <dependencies>
@@ -142,6 +142,12 @@
             <version>3.1.1</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-compiler</artifactId>
+            <version>${scala.version}</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
     <repositories>
         <repository>
diff --git a/sql/src/main/scala/org/apache/spark/sql/geosparksql/expressions/Functions.scala b/sql/src/main/scala/org/apache/spark/sql/geosparksql/expressions/Functions.scala
index 853142f..53b5a75 100644
--- a/sql/src/main/scala/org/apache/spark/sql/geosparksql/expressions/Functions.scala
+++ b/sql/src/main/scala/org/apache/spark/sql/geosparksql/expressions/Functions.scala
@@ -42,7 +42,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.types.ArrayType
 import implicits._
-import org.geotools.factory.Hints
+import org.geotools.util.factory.Hints
 import org.opengis.referencing.crs.CoordinateReferenceSystem
 
 import scala.util.{Failure, Success, Try}
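
Note: in GeoTools 23.x the Hints class lives in org.geotools.util.factory rather than org.geotools.factory, hence the import change above. A minimal sketch of a CRS lookup that depends on the relocated class (standard GeoTools API; illustrative only, not Sedona's exact ST_Transform code):

    import org.geotools.referencing.ReferencingFactoryFinder;
    import org.geotools.util.factory.Hints;   // moved here in GeoTools 23.x
    import org.opengis.referencing.FactoryException;
    import org.opengis.referencing.crs.CoordinateReferenceSystem;

    public class CrsLookupSketch {
        // Resolve an EPSG code, optionally forcing longitude-first axis order via Hints.
        public static CoordinateReferenceSystem lookup(String code, boolean lonLat) throws FactoryException {
            Hints hints = new Hints(Hints.FORCE_LONGITUDE_FIRST_AXIS_ORDER, lonLat);
            return ReferencingFactoryFinder.getCRSAuthorityFactory("EPSG", hints)
                    .createCoordinateReferenceSystem(code);
        }
    }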
diff --git a/sql/src/test/java/org/datasyslab/geosparksql/adapterTestJava.java b/sql/src/test/java/org/datasyslab/geosparksql/adapterTestJava.java
index 65f3abc..b62105a 100644
--- a/sql/src/test/java/org/datasyslab/geosparksql/adapterTestJava.java
+++ b/sql/src/test/java/org/datasyslab/geosparksql/adapterTestJava.java
@@ -83,98 +83,77 @@ public class adapterTestJava
     public void testReadCsv()
     {
         Dataset<Row> df = sparkSession.read().format("csv").option("delimiter", "\t").option("header", "false").load(csvPointInputLocation);
-        df.show();
         df.createOrReplaceTempView("inputtable");
         Dataset<Row> spatialDf = sparkSession.sql("select ST_PointFromText(inputtable._c0,\",\") as arealandmark from inputtable");
-        spatialDf.show();
-        spatialDf.printSchema();
         SpatialRDD spatialRDD = Adapter.toSpatialRdd(spatialDf, "arealandmark");
         spatialRDD.analyze();
-        Adapter.toDf(spatialRDD, sparkSession).show();
+        Adapter.toDf(spatialRDD, sparkSession).show(1);
     }
 
     @Test
     public void testReadCsvUsingCoordinates()
     {
         Dataset<Row> df = sparkSession.read().format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation);
-        df.show();
         df.createOrReplaceTempView("inputtable");
         Dataset<Row> spatialDf = sparkSession.sql("select ST_Point(cast(inputtable._c0 as Decimal(24,20)),cast(inputtable._c1 as Decimal(24,20))) as arealandmark from inputtable");
-        spatialDf.show();
-        spatialDf.printSchema();
         SpatialRDD spatialRDD = Adapter.toSpatialRdd(spatialDf, "arealandmark");
         spatialRDD.analyze();
-        Adapter.toDf(spatialRDD, sparkSession).show();
+        Adapter.toDf(spatialRDD, sparkSession).show(1);
     }
 
     @Test
     public void testReadCsvWithIdUsingCoordinates()
     {
         Dataset<Row> df = sparkSession.read().format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation);
-        df.show();
         df.createOrReplaceTempView("inputtable");
         Dataset<Row> spatialDf = sparkSession.sql("select ST_Point(cast(inputtable._c0 as Decimal(24,20)),cast(inputtable._c1 as Decimal(24,20))) as arealandmark from inputtable");
-        spatialDf.show();
-        spatialDf.printSchema();
         SpatialRDD spatialRDD = Adapter.toSpatialRdd(spatialDf, "arealandmark");
         spatialRDD.analyze();
-        Adapter.toDf(spatialRDD, sparkSession).show();
+        Adapter.toDf(spatialRDD, sparkSession).show(1);
     }
 
     @Test
     public void testReadWkt()
     {
         Dataset<Row> df = sparkSession.read().format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation);
-        df.show();
         df.createOrReplaceTempView("inputtable");
         Dataset<Row> spatialDf = sparkSession.sql("select ST_GeomFromWKT(inputtable._c0) as usacounty from inputtable");
-        spatialDf.show();
-        spatialDf.printSchema();
         SpatialRDD spatialRDD = Adapter.toSpatialRdd(spatialDf, "usacounty");
         spatialRDD.analyze();
-        Adapter.toDf(spatialRDD, sparkSession).show();
+        Adapter.toDf(spatialRDD, sparkSession).show(1);
     }
 
     @Test
     public void testReadWktWithId()
     {
         Dataset<Row> df = sparkSession.read().format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation);
-        df.show();
         df.createOrReplaceTempView("inputtable");
         Dataset<Row> spatialDf = sparkSession.sql("select ST_GeomFromWKT(inputtable._c0) as usacounty from inputtable");
-        spatialDf.show();
-        spatialDf.printSchema();
         SpatialRDD spatialRDD = Adapter.toSpatialRdd(spatialDf, "usacounty");
         spatialRDD.analyze();
-        Adapter.toDf(spatialRDD, sparkSession).show();
+        Adapter.toDf(spatialRDD, sparkSession).show(1);
     }
 
     @Test
     public void testReadWkb()
     {
         Dataset<Row> df = sparkSession.read().format("csv").option("delimiter", "\t").option("header", "false").load(mixedWkbGeometryInputLocation);
-        df.show();
         df.createOrReplaceTempView("inputtable");
         Dataset<Row> spatialDf = sparkSession.sql("select ST_GeomFromWKB(inputtable._c0) as usacounty from inputtable");
-        spatialDf.show();
-        spatialDf.printSchema();
         SpatialRDD spatialRDD = Adapter.toSpatialRdd(spatialDf, "usacounty");
         spatialRDD.analyze();
-        Adapter.toDf(spatialRDD, sparkSession).show();
+        Adapter.toDf(spatialRDD, sparkSession).show(1);
     }
 
     @Test
     public void testReadWkbWithId()
     {
         Dataset<Row> df = sparkSession.read().format("csv").option("delimiter", "\t").option("header", "false").load(mixedWkbGeometryInputLocation);
-        df.show();
         df.createOrReplaceTempView("inputtable");
         Dataset<Row> spatialDf = sparkSession.sql("select ST_GeomFromWKB(inputtable._c0) as usacounty from inputtable");
-        spatialDf.show();
-        spatialDf.printSchema();
         SpatialRDD spatialRDD = Adapter.toSpatialRdd(spatialDf, "usacounty");
         spatialRDD.analyze();
-        Adapter.toDf(spatialRDD, sparkSession).show();
+        Adapter.toDf(spatialRDD, sparkSession).show(1);
     }
 
     @Test
@@ -182,7 +161,7 @@ public class adapterTestJava
     {
         SpatialRDD spatialRDD = ShapefileReader.readToGeometryRDD(JavaSparkContext.fromSparkContext(sparkSession.sparkContext()), shapefileInputLocation);
         spatialRDD.analyze();
-        Adapter.toDf(spatialRDD, sparkSession).show();
+        Adapter.toDf(spatialRDD, sparkSession).show(1);
     }
 
     @Test
@@ -210,7 +189,7 @@ public class adapterTestJava
 
         Dataset joinResultDf = Adapter.toDf(joinResultPairRDD, sparkSession);
 
-        joinResultDf.show();
+        joinResultDf.show(1);
     }
 
     @Test
@@ -240,7 +219,7 @@ public class adapterTestJava
 
         Dataset joinResultDf = Adapter.toDf(joinResultPairRDD, sparkSession);
 
-        joinResultDf.show();
+        joinResultDf.show(1);
     }
 
     /**
diff --git a/sql/src/test/scala/org/datasyslab/geosparksql/adapterTestScala.scala b/sql/src/test/scala/org/datasyslab/geosparksql/adapterTestScala.scala
index a592a3d..f3d338e 100644
--- a/sql/src/test/scala/org/datasyslab/geosparksql/adapterTestScala.scala
+++ b/sql/src/test/scala/org/datasyslab/geosparksql/adapterTestScala.scala
@@ -38,112 +38,84 @@ class adapterTestScala extends TestBaseScala {
 
     it("Read CSV point into a SpatialRDD") {
       var df = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(arealmPointInputLocation)
-      df.show()
       df.createOrReplaceTempView("inputtable")
       var spatialDf = sparkSession.sql("select ST_PointFromText(inputtable._c0,\",\") as arealandmark from inputtable")
-      spatialDf.show()
-      spatialDf.printSchema()
       var spatialRDD = Adapter.toSpatialRdd(spatialDf, "arealandmark")
       spatialRDD.analyze()
-      Adapter.toDf(spatialRDD, sparkSession).show()
+      Adapter.toDf(spatialRDD, sparkSession).show(1)
     }
 
     it("Read CSV point at a different column id into a SpatialRDD") {
       var df = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(arealmPointInputLocation)
-      df.show()
       df.createOrReplaceTempView("inputtable")
       var spatialDf = sparkSession.sql("select \'123\', \'456\', ST_PointFromText(inputtable._c0,\",\") as arealandmark, \'789\' from inputtable")
-      spatialDf.show()
-      spatialDf.printSchema()
       var spatialRDD = Adapter.toSpatialRdd(spatialDf, 2)
       spatialRDD.analyze()
       val newDf = Adapter.toDf(spatialRDD, sparkSession)
-      newDf.show()
       assert(newDf.schema.toList.map(f=>f.name).mkString("\t").equals("geometry\t123\t456\t789"))
     }
 
     it("Read CSV point at a different column col name into a SpatialRDD") {
       var df = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(arealmPointInputLocation)
-      df.show()
       df.createOrReplaceTempView("inputtable")
       var spatialDf = sparkSession.sql("select \'123\', \'456\', ST_PointFromText(inputtable._c0,\",\") as arealandmark, \'789\' from inputtable")
-      spatialDf.show()
-      spatialDf.printSchema()
       var spatialRDD = Adapter.toSpatialRdd(spatialDf, "arealandmark")
       spatialRDD.analyze()
       val newDf = Adapter.toDf(spatialRDD, sparkSession)
-      newDf.show()
       assert(newDf.schema.toList.map(f=>f.name).mkString("\t").equals("geometry\t123\t456\t789"))
     }
 
     it("Read CSV point into a SpatialRDD by passing coordinates") {
       var df = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(arealmPointInputLocation)
-      df.show()
       df.createOrReplaceTempView("inputtable")
       var spatialDf = sparkSession.sql("select ST_Point(cast(inputtable._c0 as Decimal(24,20)),cast(inputtable._c1 as Decimal(24,20))) as arealandmark from inputtable")
-      spatialDf.show()
-      spatialDf.printSchema()
       var spatialRDD = Adapter.toSpatialRdd(spatialDf, "arealandmark")
       assert(Adapter.toDf(spatialRDD, sparkSession).columns.length==1)
-      Adapter.toDf(spatialRDD, sparkSession).show()
+//      Adapter.toDf(spatialRDD, sparkSession).show(1)
     }
 
     it("Read CSV point into a SpatialRDD with unique Id by passing coordinates") {
       var df = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(arealmPointInputLocation)
-      df.show()
       df.createOrReplaceTempView("inputtable")
       // Use Column _c0 as the unique Id but the id can be anything in the same row
       var spatialDf = sparkSession.sql("select ST_Point(cast(inputtable._c0 as Decimal(24,20)),cast(inputtable._c1 as Decimal(24,20))) as arealandmark from inputtable")
-      spatialDf.show()
-      spatialDf.printSchema()
       var spatialRDD = Adapter.toSpatialRdd(spatialDf, "arealandmark")
       spatialRDD.analyze()
       assert(Adapter.toDf(spatialRDD, sparkSession).columns.length==1)
-      Adapter.toDf(spatialRDD, sparkSession).show()
+//      Adapter.toDf(spatialRDD, sparkSession).show(1)
     }
 
 
     it("Read mixed WKT geometries into a SpatialRDD") {
       var df = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
-      df.show()
       df.createOrReplaceTempView("inputtable")
       var spatialDf = sparkSession.sql("select ST_GeomFromWKT(inputtable._c0) as usacounty from inputtable")
-      spatialDf.show()
-      spatialDf.printSchema()
       var spatialRDD = Adapter.toSpatialRdd(spatialDf, "usacounty")
       spatialRDD.analyze()
-      Adapter.toDf(spatialRDD, sparkSession).show()
       assert(Adapter.toDf(spatialRDD, sparkSession).columns.length==1)
-      Adapter.toDf(spatialRDD, sparkSession).show()
     }
 
     it("Read mixed WKT geometries into a SpatialRDD with uniqueId") {
       var df = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
-      df.show()
       df.createOrReplaceTempView("inputtable")
       var spatialDf = sparkSession.sql("select ST_GeomFromWKT(inputtable._c0) as usacounty, inputtable._c3, inputtable._c5 from inputtable")
-      spatialDf.show()
-      spatialDf.printSchema()
       var spatialRDD = Adapter.toSpatialRdd(spatialDf, "usacounty")
       spatialRDD.analyze()
       assert(Adapter.toDf(spatialRDD, sparkSession).columns.length==3)
-      Adapter.toDf(spatialRDD, sparkSession).show()
     }
 
     it("Read shapefile -> DataFrame") {
       var spatialRDD = ShapefileReader.readToGeometryRDD(sparkSession.sparkContext, shapefileInputLocation)
       spatialRDD.analyze()
-      println(spatialRDD.fieldNames)
       var df = Adapter.toDf(spatialRDD, sparkSession)
-      df.show()
+      df.show(1)
     }
 
     it("Read shapefileWithMissing -> DataFrame") {
       var spatialRDD = ShapefileReader.readToGeometryRDD(sparkSession.sparkContext, shapefileWithMissingsTrailingInputLocation)
       spatialRDD.analyze()
-      println(spatialRDD.fieldNames)
       var df = Adapter.toDf(spatialRDD, sparkSession)
-      df.show()
+      df.show(1)
     }
 
     it("Read GeoJSON to DataFrame") {
@@ -151,7 +123,6 @@ class adapterTestScala extends TestBaseScala {
       var spatialRDD = new PolygonRDD(sparkSession.sparkContext, geojsonInputLocation, FileDataSplitter.GEOJSON, true)
       spatialRDD.analyze()
       var df = Adapter.toDf(spatialRDD, sparkSession).withColumn("geometry", callUDF("ST_GeomFromWKT", col("geometry")))
-      df.show()
       assert (df.columns(1) == "STATEFP")
     }
 
@@ -176,10 +147,10 @@ class adapterTestScala extends TestBaseScala {
       val joinResultPairRDD = JoinQuery.SpatialJoinQueryFlat(pointRDD, polygonRDD, true, true)
 
       val joinResultDf = Adapter.toDf(joinResultPairRDD, sparkSession)
-      joinResultDf.show()
+      joinResultDf.show(1)
 
       val joinResultDf2 = Adapter.toDf(joinResultPairRDD, List("abc","def"), List(), sparkSession)
-      joinResultDf2.show()
+      joinResultDf2.show(1)
     }
 
     it("Convert distance join result to DataFrame") {
@@ -204,7 +175,7 @@ class adapterTestScala extends TestBaseScala {
       var joinResultPairRDD = JoinQuery.DistanceJoinQueryFlat(pointRDD, circleRDD, true, true)
 
       var joinResultDf = Adapter.toDf(joinResultPairRDD, sparkSession)
-      joinResultDf.show()
+      joinResultDf.show(1)
     }
 
     it("load id column Data check"){
diff --git a/sql/src/test/scala/org/datasyslab/geosparksql/aggregateFunctionTestScala.scala b/sql/src/test/scala/org/datasyslab/geosparksql/aggregateFunctionTestScala.scala
index 802c90a..751a49f 100644
--- a/sql/src/test/scala/org/datasyslab/geosparksql/aggregateFunctionTestScala.scala
+++ b/sql/src/test/scala/org/datasyslab/geosparksql/aggregateFunctionTestScala.scala
@@ -27,9 +27,6 @@
 package org.datasyslab.geosparksql
 
 import org.locationtech.jts.geom.{Coordinate, Geometry, GeometryFactory}
-import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
-import org.apache.spark.sql.expressions.UserDefinedAggregator
-import org.apache.spark.sql.functions
 
 class aggregateFunctionTestScala extends TestBaseScala {
 
@@ -56,10 +53,8 @@ class aggregateFunctionTestScala extends TestBaseScala {
 
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(unionPolygonInputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
-      polygonCsvDf.show()
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       var union = sparkSession.sql("select ST_Union_Aggr(polygondf.polygonshape) from polygondf")
       assert(union.take(1)(0).get(0).asInstanceOf[Geometry].getArea == 10100)
     }
@@ -68,14 +63,12 @@ class aggregateFunctionTestScala extends TestBaseScala {
 
       val twoPolygonsAsWktDf = sparkSession.read.textFile(intersectionPolygonInputLocation).toDF("polygon_wkt")
       twoPolygonsAsWktDf.createOrReplaceTempView("two_polygons_wkt")
-      twoPolygonsAsWktDf.show()
 
       sparkSession
         .sql("select ST_GeomFromWKT(polygon_wkt) as polygon from two_polygons_wkt")
         .createOrReplaceTempView("two_polygons")
 
       val intersectionDF = sparkSession.sql("select ST_Intersection_Aggr(polygon) from two_polygons")
-      intersectionDF.show(false)
 
       assertResult(0.0034700160226227607)(intersectionDF.take(1)(0).get(0).asInstanceOf[Geometry].getArea)
     }
@@ -84,14 +77,12 @@ class aggregateFunctionTestScala extends TestBaseScala {
 
       val twoPolygonsAsWktDf = sparkSession.read.textFile(intersectionPolygonNoIntersectionInputLocation).toDF("polygon_wkt")
       twoPolygonsAsWktDf.createOrReplaceTempView("two_polygons_no_intersection_wkt")
-      twoPolygonsAsWktDf.show()
 
       sparkSession
         .sql("select ST_GeomFromWKT(polygon_wkt) as polygon from two_polygons_no_intersection_wkt")
         .createOrReplaceTempView("two_polygons_no_intersection")
 
       val intersectionDF = sparkSession.sql("select ST_Intersection_Aggr(polygon) from two_polygons_no_intersection")
-      intersectionDF.show(false)
 
       assertResult(0.0)(intersectionDF.take(1)(0).get(0).asInstanceOf[Geometry].getArea)
     }
diff --git a/sql/src/test/scala/org/datasyslab/geosparksql/constructorTestScala.scala b/sql/src/test/scala/org/datasyslab/geosparksql/constructorTestScala.scala
index c87d6ce..ffcb94e 100644
--- a/sql/src/test/scala/org/datasyslab/geosparksql/constructorTestScala.scala
+++ b/sql/src/test/scala/org/datasyslab/geosparksql/constructorTestScala.scala
@@ -51,14 +51,12 @@ class constructorTestScala extends TestBaseScala {
 
     it ("Passed ST_PolygonFromEnvelope") {
       val polygonDF = sparkSession.sql("select ST_PolygonFromEnvelope(double(1.234),double(2.234),double(3.345),double(3.345))")
-      polygonDF.show(false)
       assert(polygonDF.count() == 1)
     }
 
     it("Passed ST_PointFromText") {
       var pointCsvDF = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(arealmPointInputLocation)
       pointCsvDF.createOrReplaceTempView("pointtable")
-      pointCsvDF.show(false)
 
       var pointDf = sparkSession.sql("select ST_PointFromText(concat(_c0,',',_c1),',') as arealandmark from pointtable")
       assert(pointDf.count() == 121960)
@@ -67,18 +65,14 @@ class constructorTestScala extends TestBaseScala {
     it("Passed ST_GeomFromWKT") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWkt(polygontable._c0) as countyshape from polygontable")
-      polygonDf.show(10)
       assert(polygonDf.count() == 100)
     }
 
     it("Passed ST_GeomFromText") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromText(polygontable._c0) as countyshape from polygontable")
-      polygonDf.show(10)
       assert(polygonDf.count() == 100)
     }
     
@@ -98,39 +92,32 @@ class constructorTestScala extends TestBaseScala {
     it("Passed ST_GeomFromWKB") {
       var polygonWkbDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWkbGeometryInputLocation)
       polygonWkbDf.createOrReplaceTempView("polygontable")
-      polygonWkbDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWKB(polygontable._c0) as countyshape from polygontable")
-      polygonDf.show(10)
       assert(polygonDf.count() == 100)
     }
 
     it("Passed ST_GeomFromGeoJSON") {
       val polygonJsonDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(geojsonInputLocation)
       polygonJsonDf.createOrReplaceTempView("polygontable")
-      polygonJsonDf.show()
       val polygonDf = sparkSession.sql("select ST_GeomFromGeoJSON(polygontable._c0) as countyshape from polygontable")
-      polygonDf.show()
       assert(polygonDf.count() == 1000)
     }
 
     it("Passed GeoJsonReader to DataFrame") {
       var spatialRDD = GeoJsonReader.readToGeometryRDD(sparkSession.sparkContext, geojsonInputLocation)
       var spatialDf = Adapter.toDf(spatialRDD, sparkSession)
-      spatialDf.show()
+      assert(spatialDf.count()>0)
     }
 
     it("Read shapefile -> DataFrame > RDD -> DataFrame") {
       var spatialRDD = ShapefileReader.readToGeometryRDD(sparkSession.sparkContext, shapefileInputLocation)
       spatialRDD.analyze()
       var df = Adapter.toDf(spatialRDD, sparkSession)
-      df.show
       assert (df.columns(1) == "STATEFP")
       import org.apache.spark.sql.functions.{callUDF, col}
       df = df.withColumn("geometry", callUDF("ST_GeomFromWKT", col("geometry")))
-      df.show()
       var spatialRDD2 = Adapter.toSpatialRdd(df, "geometry")
-      println(spatialRDD2.rawSpatialRDD.take(1).get(0).getUserData)
-      Adapter.toDf(spatialRDD2, sparkSession).show()
+      Adapter.toDf(spatialRDD2, sparkSession).show(1)
     }
   }
 }
diff --git a/sql/src/test/scala/org/datasyslab/geosparksql/functionTestScala.scala b/sql/src/test/scala/org/datasyslab/geosparksql/functionTestScala.scala
index a6d5ade..29d6402 100644
--- a/sql/src/test/scala/org/datasyslab/geosparksql/functionTestScala.scala
+++ b/sql/src/test/scala/org/datasyslab/geosparksql/functionTestScala.scala
@@ -42,89 +42,72 @@ class functionTestScala extends TestBaseScala with Matchers with GeometrySample
     it("Passed ST_ConvexHull") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       var functionDf = sparkSession.sql("select ST_ConvexHull(polygondf.countyshape) from polygondf")
-      functionDf.show()
+      assert(functionDf.count()>0);
     }
 
     it("Passed ST_Buffer") {
       val polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       val polygonDf = sparkSession.sql("select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       val functionDf = sparkSession.sql("select ST_Buffer(polygondf.countyshape, 1) from polygondf")
-      functionDf.show()
+      assert(functionDf.count()>0);
     }
 
     it("Passed ST_Envelope") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       var functionDf = sparkSession.sql("select ST_Envelope(polygondf.countyshape) from polygondf")
-      functionDf.show()
+      assert(functionDf.count()>0);
     }
 
     it("Passed ST_Centroid") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       var functionDf = sparkSession.sql("select ST_Centroid(polygondf.countyshape) from polygondf")
-      functionDf.show()
+      assert(functionDf.count()>0);
     }
 
     it("Passed ST_Length") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       var functionDf = sparkSession.sql("select ST_Length(polygondf.countyshape) from polygondf")
-      functionDf.show()
+      assert(functionDf.count()>0);
     }
 
     it("Passed ST_Area") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       var functionDf = sparkSession.sql("select ST_Area(polygondf.countyshape) from polygondf")
-      functionDf.show()
+      assert(functionDf.count()>0);
     }
 
     it("Passed ST_Distance") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       var functionDf = sparkSession.sql("select ST_Distance(polygondf.countyshape, polygondf.countyshape) from polygondf")
-      functionDf.show()
+      assert(functionDf.count()>0);
     }
 
     it("Passed ST_Transform") {
       var polygonWktDf = sparkSession.read.format("csv").option("delimiter", "\t").option("header", "false").load(mixedWktGeometryInputLocation)
       polygonWktDf.createOrReplaceTempView("polygontable")
-      polygonWktDf.show()
       var polygonDf = sparkSession.sql("select ST_GeomFromWKT(polygontable._c0) as countyshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
       var functionDf = sparkSession.sql("select ST_Transform(polygondf.countyshape, 'epsg:4326','epsg:3857',true, false) from polygondf")
-      functionDf.show()
 
       val polygon = "POLYGON ((110.54671 55.818002, 110.54671 55.143743, 110.940494 55.143743, 110.940494 55.818002, 110.54671 55.818002))"
       val forceXYExpect = "POLYGON ((471596.69167460164 6185916.951191288, 471107.5623640998 6110880.974228167, 496207.109151055 6110788.804712435, 496271.31937046186 6185825.60569904, 471596.69167460164 6185916.951191288))"
@@ -134,10 +117,8 @@ class functionTestScala extends TestBaseScala with Matchers with GeometrySample
         .createOrReplaceTempView("df")
 
       sparkSession.sql("select ST_Transform(geom, 'EPSG:4326', 'EPSG:32649', false, false)  from df")
-        .show(false)
 
       sparkSession.sql("select ST_Transform(geom, 'EPSG:4326', 'EPSG:32649', true, false)  from df")
-        .show(false)
 
       val forceXYResult = sparkSession.sql(s"""select ST_Transform(ST_geomFromWKT('$polygon'),'EPSG:4326', 'EPSG:32649', true, false)""").rdd.map(row => row.getAs[Geometry](0).toString).collect()(0)
       assert(forceXYResult == forceXYExpect)
@@ -199,13 +180,11 @@ class functionTestScala extends TestBaseScala with Matchers with GeometrySample
         """
           |SELECT ST_PrecisionReduce(ST_GeomFromWKT('Point(0.1234567890123456789 0.1234567890123456789)'), 8)
         """.stripMargin)
-      testtable.show(false)
       assert(testtable.take(1)(0).get(0).asInstanceOf[Geometry].getCoordinates()(0).x == 0.12345679)
       testtable = sparkSession.sql(
         """
           |SELECT ST_PrecisionReduce(ST_GeomFromWKT('Point(0.1234567890123456789 0.1234567890123456789)'), 11)
         """.stripMargin)
-      testtable.show(false)
       assert(testtable.take(1)(0).get(0).asInstanceOf[Geometry].getCoordinates()(0).x == 0.12345678901)
 
     }
@@ -351,9 +330,6 @@ class functionTestScala extends TestBaseScala with Matchers with GeometrySample
 
       geometries
         .selectExpr("ST_Azimuth(geomA, geomB)")
-        .show
-      geometries
-        .selectExpr("ST_Azimuth(geomA, geomB)")
         .as[Double]
         .map(180 / math.Pi * _)
         .collect()
@@ -604,7 +580,6 @@ class functionTestScala extends TestBaseScala with Matchers with GeometrySample
 
     When("Using ST_Dumps")
     val dumpedGeometries = geometryDf.selectExpr("ST_Dump(geom) as geom")
-    dumpedGeometries.show(10, false)
     Then("Should return geometries list")
 
     dumpedGeometries.select(explode($"geom")).count shouldBe 14
diff --git a/sql/src/test/scala/org/datasyslab/geosparksql/predicateJoinTestScala.scala b/sql/src/test/scala/org/datasyslab/geosparksql/predicateJoinTestScala.scala
index cf29863..0026859 100644
--- a/sql/src/test/scala/org/datasyslab/geosparksql/predicateJoinTestScala.scala
+++ b/sql/src/test/scala/org/datasyslab/geosparksql/predicateJoinTestScala.scala
@@ -41,103 +41,67 @@ class predicateJoinTestScala extends TestBaseScala {
 
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygonInputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
-      polygonCsvDf.show()
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       var pointCsvDF = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF.createOrReplaceTempView("pointtable")
-      pointCsvDF.show()
       var pointDf = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape from pointtable")
       pointDf.createOrReplaceTempView("pointdf")
-      pointDf.show()
 
       var rangeJoinDf = sparkSession.sql("select * from polygondf, pointdf where ST_Contains(polygondf.polygonshape,pointdf.pointshape) ")
 
-      rangeJoinDf.explain()
-      rangeJoinDf.show(3)
       assert(rangeJoinDf.count() == 1000)
     }
 
     it("Passed ST_Intersects in a join") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygonInputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
-      polygonCsvDf.show()
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       var pointCsvDF = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF.createOrReplaceTempView("pointtable")
-      pointCsvDF.show()
       var pointDf = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape from pointtable")
       pointDf.createOrReplaceTempView("pointdf")
-      pointDf.show()
 
       var rangeJoinDf = sparkSession.sql("select * from polygondf, pointdf where ST_Intersects(polygondf.polygonshape,pointdf.pointshape) ")
-
-      rangeJoinDf.explain()
-      rangeJoinDf.show(3)
       assert(rangeJoinDf.count() == 1000)
     }
 
     it("Passed ST_Touches in a join") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygonInputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
-      polygonCsvDf.show()
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       var pointCsvDF = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF.createOrReplaceTempView("pointtable")
-      pointCsvDF.show()
       var pointDf = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape from pointtable")
       pointDf.createOrReplaceTempView("pointdf")
-      pointDf.show()
 
       var rangeJoinDf = sparkSession.sql("select * from polygondf, pointdf where ST_Touches(polygondf.polygonshape,pointdf.pointshape) ")
 
-      rangeJoinDf.explain()
-      rangeJoinDf.show(3)
       assert(rangeJoinDf.count() == 1000)
     }
 
     it("Passed ST_Within in a join") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygonInputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
-      polygonCsvDf.show()
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       var pointCsvDF = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF.createOrReplaceTempView("pointtable")
-      pointCsvDF.show()
       var pointDf = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape from pointtable")
       pointDf.createOrReplaceTempView("pointdf")
-      pointDf.show()
 
       var rangeJoinDf = sparkSession.sql("select * from polygondf, pointdf where ST_Within(pointdf.pointshape, polygondf.polygonshape) ")
 
-      rangeJoinDf.explain()
-      rangeJoinDf.show(3)
       assert(rangeJoinDf.count() == 1000)
     }
     
     it("Passed ST_Overlaps in a join") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygonInputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
@@ -150,33 +114,22 @@ class predicateJoinTestScala extends TestBaseScala {
 
       var rangeJoinDf = sparkSession.sql("select * from polygondf, polygonodf where ST_Overlaps(polygondf.polygonshape, polygonodf.polygonshape)")
 
-      rangeJoinDf.explain()
-      rangeJoinDf.show(3)
       assert(rangeJoinDf.count() == 57)
     }
 
     it("Passed ST_Crosses in a join") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygonInputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
-      polygonCsvDf.show()
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       var pointCsvDF = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF.createOrReplaceTempView("pointtable")
-      pointCsvDF.show()
       var pointDf = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape from pointtable")
       pointDf.createOrReplaceTempView("pointdf")
-      pointDf.show()
 
       var rangeJoinDf = sparkSession.sql("select * from polygondf, pointdf where ST_Crosses(pointdf.pointshape, polygondf.polygonshape) ")
 
-      rangeJoinDf.explain()
-      rangeJoinDf.show(3)
       assert(rangeJoinDf.count() == 1000)
     }
 
@@ -185,21 +138,16 @@ class predicateJoinTestScala extends TestBaseScala {
 
       var pointCsvDF1 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF1.createOrReplaceTempView("pointtable")
-      pointCsvDF1.show()
       var pointDf1 = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape1 from pointtable")
       pointDf1.createOrReplaceTempView("pointdf1")
-      pointDf1.show()
 
       var pointCsvDF2 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF2.createOrReplaceTempView("pointtable")
-      pointCsvDF2.show()
       var pointDf2 = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape2 from pointtable")
       pointDf2.createOrReplaceTempView("pointdf2")
-      pointDf2.show()
 
       var distanceJoinDf = sparkSession.sql("select * from pointdf1, pointdf2 where ST_Distance(pointdf1.pointshape1,pointdf2.pointshape2) <= 2")
-      distanceJoinDf.explain()
-      distanceJoinDf.show(10)
+
       assert(distanceJoinDf.count() == 2998)
     }
 
@@ -208,47 +156,33 @@ class predicateJoinTestScala extends TestBaseScala {
 
       var pointCsvDF1 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF1.createOrReplaceTempView("pointtable")
-      pointCsvDF1.show()
       var pointDf1 = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape1 from pointtable")
       pointDf1.createOrReplaceTempView("pointdf1")
-      pointDf1.show()
 
       var pointCsvDF2 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF2.createOrReplaceTempView("pointtable")
-      pointCsvDF2.show()
       var pointDf2 = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape2 from pointtable")
       pointDf2.createOrReplaceTempView("pointdf2")
-      pointDf2.show()
 
       var distanceJoinDf = sparkSession.sql("select * from pointdf1, pointdf2 where ST_Distance(pointdf1.pointshape1,pointdf2.pointshape2) < 2")
-      distanceJoinDf.explain()
-      distanceJoinDf.show(10)
+
       assert(distanceJoinDf.count() == 2998)
     }
 
     it("Passed ST_Contains in a range and join") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygonInputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
-      polygonCsvDf.show()
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       var pointCsvDF = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPointInputLocation)
       pointCsvDF.createOrReplaceTempView("pointtable")
-      pointCsvDF.show()
       var pointDf = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20))) as pointshape from pointtable")
       pointDf.createOrReplaceTempView("pointdf")
-      pointDf.show()
 
       var rangeJoinDf = sparkSession.sql("select * from polygondf, pointdf where ST_Contains(polygondf.polygonshape,pointdf.pointshape) " +
         "and ST_Contains(ST_PolygonFromEnvelope(1.0,101.0,501.0,601.0), polygondf.polygonshape)")
 
-      rangeJoinDf.explain()
-      rangeJoinDf.show(3)
       assert(rangeJoinDf.count() == 500)
     }
 
@@ -263,7 +197,6 @@ class predicateJoinTestScala extends TestBaseScala {
 
       val pointDF = sparkSession.sql("select id, ST_Point(cast(lat as Decimal(24,20)), cast(lon as Decimal(24,20))) AS latlon_point FROM rawPointDf")
       pointDF.createOrReplaceTempView("pointDf")
-      pointDF.show(false)
 
       val rawPolygonDf = sparkSession.createDataFrame(
         sparkSession.sparkContext.parallelize(
@@ -284,101 +217,65 @@ class predicateJoinTestScala extends TestBaseScala {
       assert(withinEnvelopeDF.count() == 1)
     }
     it("Passed ST_Equals in a join for ST_Point") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var pointCsvDf1 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPoint1InputLocation)
       pointCsvDf1.createOrReplaceTempView("pointtable1")
-      pointCsvDf1.show()
       var pointDf1 = sparkSession.sql("select ST_Point(cast(pointtable1._c0 as Decimal(24,20)),cast(pointtable1._c1 as Decimal(24,20)) ) as pointshape1 from pointtable1")
       pointDf1.createOrReplaceTempView("pointdf1")
-      pointDf1.show()
 
       var pointCsvDF2 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPoint2InputLocation)
       pointCsvDF2.createOrReplaceTempView("pointtable2")
-      pointCsvDF2.show()
       var pointDf2 = sparkSession.sql("select ST_Point(cast(pointtable2._c0 as Decimal(24,20)),cast(pointtable2._c1 as Decimal(24,20))) as pointshape2 from pointtable2")
       pointDf2.createOrReplaceTempView("pointdf2")
-      pointDf2.show()
 
       var equalJoinDf = sparkSession.sql("select * from pointdf1, pointdf2 where ST_Equals(pointdf1.pointshape1,pointdf2.pointshape2) ")
 
-      equalJoinDf.explain()
-      equalJoinDf.show(3)
       assert(equalJoinDf.count() == 100, s"Expected 100 but got ${equalJoinDf.count()}")
     }
     it("Passed ST_Equals in a join for ST_Polygon") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var polygonCsvDf1 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygon1InputLocation)
       polygonCsvDf1.createOrReplaceTempView("polygontable1")
-      polygonCsvDf1.show()
       var polygonDf1 = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable1._c0 as Decimal(24,20)),cast(polygontable1._c1 as Decimal(24,20)), cast(polygontable1._c2 as Decimal(24,20)), cast(polygontable1._c3 as Decimal(24,20))) as polygonshape1 from polygontable1")
       polygonDf1.createOrReplaceTempView("polygondf1")
-      polygonDf1.show()
 
 
       var polygonCsvDf2 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygon2InputLocation)
       polygonCsvDf2.createOrReplaceTempView("polygontable2")
-      polygonCsvDf2.show()
       var polygonDf2 = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable2._c0 as Decimal(24,20)),cast(polygontable2._c1 as Decimal(24,20)), cast(polygontable2._c2 as Decimal(24,20)), cast(polygontable2._c3 as Decimal(24,20))) as polygonshape2 from polygontable2")
       polygonDf2.createOrReplaceTempView("polygondf2")
-      polygonDf2.show()
 
       var equalJoinDf = sparkSession.sql("select * from polygondf1, polygondf2 where ST_Equals(polygondf1.polygonshape1,polygondf2.polygonshape2) ")
 
-      equalJoinDf.explain()
-      equalJoinDf.show(3)
       assert(equalJoinDf.count() == 100, s"Expected 100 but got ${equalJoinDf.count()}")
     }
     it("Passed ST_Equals in a join for ST_Polygon Random Shuffle") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var polygonCsvDf1 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygon1RandomInputLocation)
       polygonCsvDf1.createOrReplaceTempView("polygontable1")
-      polygonCsvDf1.show()
       var polygonDf1 = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable1._c0 as Decimal(24,20)),cast(polygontable1._c1 as Decimal(24,20)), cast(polygontable1._c2 as Decimal(24,20)), cast(polygontable1._c3 as Decimal(24,20))) as polygonshape1 from polygontable1")
       polygonDf1.createOrReplaceTempView("polygondf1")
-      polygonDf1.show()
 
 
       var polygonCsvDf2 = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygon2RandomInputLocation)
       polygonCsvDf2.createOrReplaceTempView("polygontable2")
-      polygonCsvDf2.show()
       var polygonDf2 = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable2._c0 as Decimal(24,20)),cast(polygontable2._c1 as Decimal(24,20)), cast(polygontable2._c2 as Decimal(24,20)), cast(polygontable2._c3 as Decimal(24,20))) as polygonshape2 from polygontable2")
       polygonDf2.createOrReplaceTempView("polygondf2")
-      polygonDf2.show()
 
       var equalJoinDf = sparkSession.sql("select * from polygondf1, polygondf2 where ST_Equals(polygondf1.polygonshape1,polygondf2.polygonshape2) ")
 
-      equalJoinDf.explain()
-      equalJoinDf.show(3)
       assert(equalJoinDf.count() == 100, s"Expected 100 but got ${equalJoinDf.count()}")
     }
     it("Passed ST_Equals in a join for ST_Point and ST_Polygon") {
-      val geosparkConf = new GeoSparkConf(sparkSession.sparkContext.getConf)
-      println(geosparkConf)
-
       var pointCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPoint1InputLocation)
       pointCsvDf.createOrReplaceTempView("pointtable")
-      pointCsvDf.show()
       var pointDf = sparkSession.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)),cast(pointtable._c1 as Decimal(24,20)) ) as pointshape from pointtable")
       pointDf.createOrReplaceTempView("pointdf")
-      pointDf.show()
 
       var polygonCsvDf = sparkSession.read.format("csv").option("delimiter", ",").option("header", "false").load(csvPolygon1InputLocation)
       polygonCsvDf.createOrReplaceTempView("polygontable")
-      polygonCsvDf.show()
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       var equalJoinDf = sparkSession.sql("select * from pointdf, polygondf where ST_Equals(pointdf.pointshape,polygondf.polygonshape) ")
 
-      equalJoinDf.explain()
-      equalJoinDf.show(3)
       assert(equalJoinDf.count() == 0, s"Expected 0 but got ${equalJoinDf.count()}")
     }
   }
diff --git a/sql/src/test/scala/org/datasyslab/geosparksql/predicateTestScala.scala b/sql/src/test/scala/org/datasyslab/geosparksql/predicateTestScala.scala
index 954a82a..09f15de 100644
--- a/sql/src/test/scala/org/datasyslab/geosparksql/predicateTestScala.scala
+++ b/sql/src/test/scala/org/datasyslab/geosparksql/predicateTestScala.scala
@@ -37,7 +37,6 @@ class predicateTestScala extends TestBaseScala {
       pointDf.createOrReplaceTempView("pointdf")
 
       var resultDf = sparkSession.sql("select * from pointdf where ST_Contains(ST_PolygonFromEnvelope(1.0,100.0,1000.0,1100.0), pointdf.arealandmark)")
-      resultDf.show()
       assert(resultDf.count() == 999)
     }
     it("Passed ST_Intersects") {
@@ -47,7 +46,6 @@ class predicateTestScala extends TestBaseScala {
       pointDf.createOrReplaceTempView("pointdf")
 
       var resultDf = sparkSession.sql("select * from pointdf where ST_Intersects(ST_PolygonFromEnvelope(1.0,100.0,1000.0,1100.0), pointdf.arealandmark)")
-      resultDf.show()
       assert(resultDf.count() == 999)
     }
     it("Passed ST_Within") {
@@ -57,7 +55,6 @@ class predicateTestScala extends TestBaseScala {
       pointDf.createOrReplaceTempView("pointdf")
 
       var resultDf = sparkSession.sql("select * from pointdf where ST_Within(pointdf.arealandmark, ST_PolygonFromEnvelope(1.0,100.0,1000.0,1100.0))")
-      resultDf.show()
       assert(resultDf.count() == 999)
     }
 
@@ -73,7 +70,6 @@ class predicateTestScala extends TestBaseScala {
       pointDf.createOrReplaceTempView("pointdf")
 
       var equaldf = sparkSession.sql("select * from pointdf where ST_Equals(pointdf.point, ST_Point(100.1, 200.1)) ")
-      equaldf.show()
 
       assert(equaldf.count() == 5, s"Expected 5 value but got ${equaldf.count()}")
 
@@ -90,17 +86,14 @@ class predicateTestScala extends TestBaseScala {
       // Convert the polygontable to polygons using ST_PolygonFromEnvelope
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       // Selected polygon is Polygon (100.01,200.01,100.5,200.5)
       var equaldf1 = sparkSession.sql("select * from polygonDf where ST_Equals(polygonDf.polygonshape, ST_PolygonFromEnvelope(100.01,200.01,100.5,200.5)) ")
-      equaldf1.show()
 
       assert(equaldf1.count() == 5, s"Expected 5 value but got ${equaldf1.count()}")
 
       // Change the order of the polygon points (100.5,200.5,100.01,200.01)
       var equaldf2 = sparkSession.sql("select * from polygonDf where ST_Equals(polygonDf.polygonshape, ST_PolygonFromEnvelope(100.5,200.5,100.01,200.01)) ")
-      equaldf2.show()
 
       assert(equaldf2.count() == 5, s"Expected 5 value but got ${equaldf2.count()}")
 
@@ -117,11 +110,9 @@ class predicateTestScala extends TestBaseScala {
       // Convert the polygontable to polygons using ST_PolygonFromEnvelope and cast
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       // Selected point is Point (91.01,191.01)
       var equaldf = sparkSession.sql("select * from polygonDf where ST_Equals(polygonDf.polygonshape, ST_Point(91.01,191.01)) ")
-      equaldf.show()
 
       assert(equaldf.count() == 0, s"Expected 0 value but got ${equaldf.count()}")
 
@@ -138,7 +129,6 @@ class predicateTestScala extends TestBaseScala {
       // Convert the polygontable to polygons using ST_PolygonFromEnvelope and cast
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       /* Selected LineString is ST_LineStringFromText - (100.01,200.01,100.5,200.01,100.5,200.5,100.01,200.5,100.01,200.01)
        * It forms the boundary of the polygon Polygon(100.01,200.01,100.5,200.5)
@@ -153,7 +143,6 @@ class predicateTestScala extends TestBaseScala {
       val string = "100.01,200.01,100.5,200.01,100.5,200.5,100.01,200.5,100.01,200.01"
 
       var equaldf = sparkSession.sql(s"select * from polygonDf where ST_Equals(polygonDf.polygonshape, ST_LineStringFromText(\'$string\', \',\')) ")
-      equaldf.show()
 
       assert(equaldf.count() == 0, s"Expected 0 value but got ${equaldf.count()}")
 
@@ -169,13 +158,11 @@ class predicateTestScala extends TestBaseScala {
       // Convert the polygontable to polygons using ST_PolygonFromEnvelope and cast
       var polygonDf = sparkSession.sql("select ST_PolygonFromEnvelope(cast(polygontable._c0 as Decimal(24,20)),cast(polygontable._c1 as Decimal(24,20)), cast(polygontable._c2 as Decimal(24,20)), cast(polygontable._c3 as Decimal(24,20))) as polygonshape from polygontable")
       polygonDf.createOrReplaceTempView("polygondf")
-      polygonDf.show()
 
       // Selected Polygon is ST_PolygonFromText - Polygon(100.01,200.01,100.5,200.5) formed using ST_PolygonFromText.
       val string = "100.01,200.01,100.5,200.01,100.5,200.5,100.01,200.5,100.01,200.01"
 
       var equaldf = sparkSession.sql(s"select * from polygonDf where ST_Equals(polygonDf.polygonshape, ST_PolygonFromText(\'$string\', \',\')) ")
-      equaldf.show()
 
       assert(equaldf.count() == 5, s"Expected 5 value but got ${equaldf.count()}")
     }
@@ -200,7 +187,6 @@ class predicateTestScala extends TestBaseScala {
       pointDf.createOrReplaceTempView("pointdf")
 
       var resultDf = sparkSession.sql("select * from pointdf where ST_Touches(pointdf.arealandmark, ST_PolygonFromEnvelope(0.0,99.0,1.1,101.1))")
-      resultDf.show()
       assert(resultDf.count() == 1)
     }
     it("Passed ST_Overlaps") {
diff --git a/viz/src/test/scala/org/datasyslab/geosparkviz/sql/optVizOperatorTest.scala b/viz/src/test/scala/org/datasyslab/geosparkviz/sql/optVizOperatorTest.scala
index 55bb03b..9f86cd5 100644
--- a/viz/src/test/scala/org/datasyslab/geosparkviz/sql/optVizOperatorTest.scala
+++ b/viz/src/test/scala/org/datasyslab/geosparkviz/sql/optVizOperatorTest.scala
@@ -52,7 +52,7 @@ class optVizOperatorTest extends TestBaseScala {
           |SELECT pixel, ${Conf.PrimaryPID}, ${Conf.SecondaryPID}, ST_Colorize(weight, (SELECT max(weight) FROM pixelaggregates))
           |FROM pixelaggregates
         """.stripMargin)
-      spark.table("colors").show()
+      spark.table("colors").show(1)
     }
 
     it("Passed full pipeline - aggregate:avg - color:uniform") {
@@ -98,7 +98,7 @@ class optVizOperatorTest extends TestBaseScala {
            |SELECT pixel, ${Conf.PrimaryPID}, ${Conf.SecondaryPID}, ST_Colorize(weight, 0, 'red')
            |FROM pixelaggregates
         """.stripMargin)
-      spark.table("colors").show()
+      spark.table("colors").show(1)
     }
 
     it("Passed lineage decoder"){
diff --git a/viz/src/test/scala/org/datasyslab/geosparkviz/sql/standardVizOperatorTest.scala b/viz/src/test/scala/org/datasyslab/geosparkviz/sql/standardVizOperatorTest.scala
index 4614750..6da5481 100644
--- a/viz/src/test/scala/org/datasyslab/geosparkviz/sql/standardVizOperatorTest.scala
+++ b/viz/src/test/scala/org/datasyslab/geosparkviz/sql/standardVizOperatorTest.scala
@@ -51,7 +51,7 @@ class standardVizOperatorTest extends TestBaseScala {
           |SELECT ST_EncodeImage(image)
           |FROM images
         """.stripMargin)
-      spark.table("imagestring").show()
+      spark.table("imagestring").show(1)
     }
 
     it("Generate a single image using a fat query") {
@@ -94,7 +94,7 @@ class standardVizOperatorTest extends TestBaseScala {
           |SELECT ST_EncodeImage(ST_Render(pixel, ST_Colorize(weight, (SELECT max(weight) FROM pixelaggregates)))) AS image, (SELECT ST_AsText(bound) FROM boundtable) AS boundary
           |FROM pixelaggregates
         """.stripMargin)
-      spark.table("images").show()
+      spark.table("images").show(1)
     }
 
     it("Passed the pipeline on points") {
@@ -127,7 +127,7 @@ class standardVizOperatorTest extends TestBaseScala {
           |GROUP BY pixel
         """.stripMargin)
       val pixelaggregates = spark.table("pixelaggregates")
-      pixelaggregates.show()
+      pixelaggregates.show(1)
     }
 
     it("Passed the pipeline on polygons") {
@@ -218,7 +218,7 @@ class standardVizOperatorTest extends TestBaseScala {
           |FROM pixelaggregates
           |GROUP BY pid
         """.stripMargin).explain()
-      spark.table("images").show()
+      spark.table("images").show(1)
     }
   }
 }