Posted to commits@sedona.apache.org by ji...@apache.org on 2023/01/28 00:26:01 UTC

[sedona] branch master updated: [SEDONA-234] ST_Point inconsistencies (#751)

This is an automated email from the ASF dual-hosted git repository.

jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/sedona.git


The following commit(s) were added to refs/heads/master by this push:
     new ee98f555 [SEDONA-234] ST_Point inconsistencies (#751)
ee98f555 is described below

commit ee98f555ff0f8c0cd7f30dd02ac0418ee7f17523
Author: Martin Andersson <u....@gmail.com>
AuthorDate: Sat Jan 28 01:25:55 2023 +0100

    [SEDONA-234] ST_Point inconsistencies (#751)
---
 .../org/apache/sedona/common/Constructors.java     | 34 +++++++++++++++++++
 .../org/apache/sedona/common/ConstructorsTest.java | 19 +++++++++++
 docs/api/sql/Constructor.md                        | 18 +++++++++-
 python/sedona/sql/st_constructors.py               | 26 +++++++++++----
 python/tests/sql/test_constructor_test.py          |  4 +--
 python/tests/sql/test_dataframe_api.py             |  2 +-
 python/tests/sql/test_function.py                  |  2 +-
 .../scala/org/apache/sedona/sql/UDF/Catalog.scala  |  3 +-
 .../sql/sedona_sql/expressions/Constructors.scala  | 31 +++++++-----------
 .../expressions/NullSafeExpressions.scala          | 38 ++++++++++++++++++++++
 .../sedona_sql/expressions/st_constructors.scala   | 21 ++++++++----
 .../apache/sedona/sql/constructorTestScala.scala   | 14 ++++++--
 .../apache/sedona/sql/dataFrameAPITestScala.scala  | 17 +++++++---
 13 files changed, 184 insertions(+), 45 deletions(-)

diff --git a/common/src/main/java/org/apache/sedona/common/Constructors.java b/common/src/main/java/org/apache/sedona/common/Constructors.java
index 7cd5e4a8..e5384471 100644
--- a/common/src/main/java/org/apache/sedona/common/Constructors.java
+++ b/common/src/main/java/org/apache/sedona/common/Constructors.java
@@ -13,6 +13,7 @@
  */
 package org.apache.sedona.common;
 
+import org.locationtech.jts.geom.Coordinate;
 import org.locationtech.jts.geom.Geometry;
 import org.locationtech.jts.geom.GeometryFactory;
 import org.locationtech.jts.geom.PrecisionModel;
@@ -44,4 +45,37 @@ public class Constructors {
         GeometryFactory geometryFactory = new GeometryFactory(new PrecisionModel(), srid);
         return new WKTReader(geometryFactory).read(wkt);
     }
+
+
+    /**
+     * Creates a point from the given coordinate.
+     * Before v1.4.0, ST_Point in the Sedona Spark API accepted an optional z value.
+     * It was removed to avoid confusion with other GIS implementations, where the optional third argument is the srid.
+     *
+     * A future version of Sedona will add an srid parameter once enough users have upgraded and are
+     * therefore already using ST_PointZ for 3D points.
+     *
+     * @param x the x value
+     * @param y the y value
+     * @return The point geometry
+     */
+    public static Geometry point(double x, double y) {
+        // See srid parameter discussion in https://issues.apache.org/jira/browse/SEDONA-234
+        GeometryFactory geometryFactory = new GeometryFactory();
+        return geometryFactory.createPoint(new Coordinate(x, y));
+    }
+
+    /**
+     * Creates a point from the given coordinate.
+     *
+     * @param x the x value
+     * @param y the y value
+     * @param z the z value
+     * @param srid Set to 0 if unknown
+     * @return The point geometry
+     */
+    public static Geometry pointZ(double x, double y, double z, int srid) {
+        GeometryFactory geometryFactory = new GeometryFactory(new PrecisionModel(), srid);
+        return geometryFactory.createPoint(new Coordinate(x, y, z));
+    }
 }
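
The two new constructors split the previously overloaded behaviour into a 2D variant and a 3D-plus-srid variant. A minimal sketch of how they behave when called directly, assuming the common module is on the classpath; the expected outputs mirror the ConstructorsTest cases added below:

```scala
import org.apache.sedona.common.{Constructors, Functions}
import org.locationtech.jts.geom.Geometry

// 2D point: no z, no srid, so the SRID stays 0 (unknown)
val p2d: Geometry = Constructors.point(1.0, 2.0)
println(p2d.toText)   // POINT (1 2)
println(p2d.getSRID)  // 0

// 3D point: z value plus an explicit srid
val p3d: Geometry = Constructors.pointZ(0.0, 1.0, 2.0, 4326)
println(p3d.getSRID)           // 4326
println(Functions.asWKT(p3d))  // POINT Z(0 1 2)
```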
diff --git a/common/src/test/java/org/apache/sedona/common/ConstructorsTest.java b/common/src/test/java/org/apache/sedona/common/ConstructorsTest.java
index 89fe729c..1caa93aa 100644
--- a/common/src/test/java/org/apache/sedona/common/ConstructorsTest.java
+++ b/common/src/test/java/org/apache/sedona/common/ConstructorsTest.java
@@ -15,6 +15,7 @@ package org.apache.sedona.common;
 
 import org.junit.Test;
 import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
 import org.locationtech.jts.io.ParseException;
 
 import static org.junit.Assert.*;
@@ -74,4 +75,22 @@ public class ConstructorsTest {
         assertEquals("Expected EMPTY or ( but found 'not' (line 1)", parseException.getMessage());
 
     }
+
+    @Test
+    public void point() {
+        Geometry point = Constructors.point(1.0d, 2.0d);
+
+        assertTrue(point instanceof Point);
+        assertEquals(0, point.getSRID());
+        assertEquals("POINT (1 2)", point.toText());
+    }
+
+    @Test
+    public void pointZ() {
+        Geometry point = Constructors.pointZ(0.0d, 1.0d, 2.0d, 4326);
+
+        assertTrue(point instanceof Point);
+        assertEquals(4326, point.getSRID());
+        assertEquals("POINT Z(0 1 2)", Functions.asWKT(point));
+    }
 }
\ No newline at end of file
diff --git a/docs/api/sql/Constructor.md b/docs/api/sql/Constructor.md
index 035b2887..292c1176 100644
--- a/docs/api/sql/Constructor.md
+++ b/docs/api/sql/Constructor.md
@@ -247,16 +247,32 @@ SELECT ST_MPolyFromText('MULTIPOLYGON(((-70.916 42.1002,-70.9468 42.0946,-70.976
 Introduction: Construct a Point from X and Y
 
 Format: `ST_Point (X:decimal, Y:decimal)`
-Format: `ST_Point (X:decimal, Y:decimal, Z:decimal)`
 
 Since: `v1.0.0`
 
+In `v1.4.0` the optional Z parameter was removed to be more consistent with other spatial SQL implementations.
+If you are upgrading from an older version of Sedona, please use ST_PointZ to create 3D points.
+
 Spark SQL example:
 ```SQL
 SELECT ST_Point(CAST(pointtable._c0 AS Decimal(24,20)), CAST(pointtable._c1 AS Decimal(24,20))) AS pointshape
 FROM pointtable
 ```
 
+## ST_PointZ
+
+Introduction: Construct a Point from X, Y, Z and an optional srid. If srid is not set, it defaults to 0 (unknown).
+
+Format: `ST_PointZ (X:decimal, Y:decimal, Z:decimal)`
+Format: `ST_PointZ (X:decimal, Y:decimal, Z:decimal, srid:integer)`
+
+Since: `v1.4.0`
+
+Spark SQL example:
+```SQL
+SELECT ST_PointZ(1.0, 2.0, 3.0) AS pointshape
+```
+
 ## ST_PointFromText
 
 Introduction: Construct a Point from Text, delimited by Delimiter
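
The SQL example in the updated page only exercises the three-argument form. A hedged sketch of both call shapes from Spark SQL, assuming a session with Sedona's SQL functions registered (ST_AsText and ST_SRID are pre-existing Sedona functions, not part of this change):

```scala
// Assumes a SparkSession with the Sedona SQL functions registered
val df = sparkSession.sql(
  """SELECT ST_AsText(ST_PointZ(1.0, 2.0, 3.0))      AS wkt,           -- POINT Z(1 2 3)
    |       ST_SRID(ST_PointZ(1.0, 2.0, 3.0))        AS srid_default,  -- 0 (unknown)
    |       ST_SRID(ST_PointZ(1.0, 2.0, 3.0, 4326))  AS srid_explicit  -- 4326
    |""".stripMargin)
df.show()
```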
diff --git a/python/sedona/sql/st_constructors.py b/python/sedona/sql/st_constructors.py
index 9d51a78a..62a8e5c6 100644
--- a/python/sedona/sql/st_constructors.py
+++ b/python/sedona/sql/st_constructors.py
@@ -161,21 +161,35 @@ def ST_LineStringFromText(coords: ColumnOrName, delimiter: ColumnOrName) -> Colu
 
 
 @validate_argument_types
-def ST_Point(x: ColumnOrNameOrNumber, y: ColumnOrNameOrNumber, z: Optional[ColumnOrNameOrNumber] = None) -> Column:
-    """Generate either a 2D or 3D point geometry column from numeric values.
+def ST_Point(x: ColumnOrNameOrNumber, y: ColumnOrNameOrNumber) -> Column:
+    """Generates a 2D point geometry column from numeric values.
 
     :param x: Either a number or numeric column representing the X coordinate of a point.
     :type x: ColumnOrNameOrNumber
     :param y: Either a number or numeric column representing the Y coordinate of a point.
     :type y: ColumnOrNameOrNumber
-    :param z: Either a number or numeric column representing the Z coordinate of a point, if None then a 2D point is generated, defaults to None
-    :type z: Optional[ColumnOrNameOrNumber], optional
     :return: Point geometry column generated from the coordinate values.
     :rtype: Column
     """
-    args = (x, y) if z is None else (x, y, z)
-    return _call_constructor_function("ST_Point", args)
+    return _call_constructor_function("ST_Point", (x, y))
+
+@validate_argument_types
+def ST_PointZ(x: ColumnOrNameOrNumber, y: ColumnOrNameOrNumber, z: ColumnOrNameOrNumber, srid: Optional[ColumnOrNameOrNumber] = None) -> Column:
+    """Generates a 3D point geometry column from numeric values.
 
+    :param x: Either a number or numeric column representing the X coordinate of a point.
+    :type x: ColumnOrNameOrNumber
+    :param y: Either a number or numeric column representing the Y coordinate of a point.
+    :type y: ColumnOrNameOrNumber
+    :param z: Either a number or numeric column representing the Z coordinate of a point.
+    :type z: ColumnOrNameOrNumber
+    :param srid: The srid of the point. Defaults to 0 (unknown).
+    :type srid: Optional[ColumnOrNameOrNumber], optional
+    :return: Point geometry column generated from the coordinate values.
+    :rtype: Column
+    """
+    args = (x, y, z) if srid is None else (x, y, z, srid)
+    return _call_constructor_function("ST_PointZ", args)
 
 @validate_argument_types
 def ST_PointFromText(coords: ColumnOrName, delimiter: ColumnOrName) -> Column:
diff --git a/python/tests/sql/test_constructor_test.py b/python/tests/sql/test_constructor_test.py
index 89db0fa4..2ed0d6ad 100644
--- a/python/tests/sql/test_constructor_test.py
+++ b/python/tests/sql/test_constructor_test.py
@@ -33,8 +33,8 @@ class TestConstructors(TestBase):
         point_df = self.spark.sql("select ST_Point(cast(pointtable._c0 as Decimal(24,20)), cast(pointtable._c1 as Decimal(24,20))) as arealandmark from pointtable")
         assert point_df.count() == 1000
 
-    def test_st_point_3d(self):
-        point_df = self.spark.sql("SELECT ST_Point(1.2345, 2.3456, 3.4567)")
+    def test_st_point_z(self):
+        point_df = self.spark.sql("SELECT ST_PointZ(1.2345, 2.3456, 3.4567)")
         assert point_df.count() == 1
 
     def test_st_point_from_text(self):
diff --git a/python/tests/sql/test_dataframe_api.py b/python/tests/sql/test_dataframe_api.py
index 5094bbdd..b0926d04 100644
--- a/python/tests/sql/test_dataframe_api.py
+++ b/python/tests/sql/test_dataframe_api.py
@@ -347,7 +347,7 @@ class TestDataFrameAPI(TestBase):
         elif request.param == "triangle_geom":
             return TestDataFrameAPI.spark.sql("SELECT ST_GeomFromWKT('POLYGON ((0 0, 1 0, 1 1, 0 0))') AS geom")
         elif request.param == "two_points":
-            return TestDataFrameAPI.spark.sql("SELECT ST_Point(0.0, 0.0, 0.0) AS a, ST_Point(3.0, 0.0, 4.0) AS b")
+            return TestDataFrameAPI.spark.sql("SELECT ST_PointZ(0.0, 0.0, 0.0) AS a, ST_PointZ(3.0, 0.0, 4.0) AS b")
         elif request.param == "invalid_geom":
             return TestDataFrameAPI.spark.sql("SELECT ST_GeomFromWKT('POLYGON ((1 5, 1 1, 3 3, 5 3, 7 1, 7 5, 5 3, 3 3, 1 5))') AS geom")
         elif request.param == "overlapping_polys":
diff --git a/python/tests/sql/test_function.py b/python/tests/sql/test_function.py
index 8db02262..50bbb3c8 100644
--- a/python/tests/sql/test_function.py
+++ b/python/tests/sql/test_function.py
@@ -170,7 +170,7 @@ class TestPredicateJoin(TestBase):
         function_df.show()
 
     def test_st_3ddistance(self):
-        function_df = self.spark.sql("select ST_3DDistance(ST_Point(0.0, 0.0, 5.0), ST_Point(1.0, 1.0, -6.0))")
+        function_df = self.spark.sql("select ST_3DDistance(ST_PointZ(0.0, 0.0, 5.0), ST_PointZ(1.0, 1.0, -6.0))")
         assert function_df.count() == 1
 
     def test_st_transform(self):
diff --git a/sql/src/main/scala/org/apache/sedona/sql/UDF/Catalog.scala b/sql/src/main/scala/org/apache/sedona/sql/UDF/Catalog.scala
index 6bc637b0..292bee07 100644
--- a/sql/src/main/scala/org/apache/sedona/sql/UDF/Catalog.scala
+++ b/sql/src/main/scala/org/apache/sedona/sql/UDF/Catalog.scala
@@ -46,7 +46,8 @@ object Catalog {
     function[ST_GeomFromGeoJSON](),
     function[ST_GeomFromGML](),
     function[ST_GeomFromKML](),
-    function[ST_Point](null),
+    function[ST_Point](),
+    function[ST_PointZ](0),
     function[ST_PolygonFromEnvelope](),
     function[ST_Contains](),
     function[ST_Intersects](),
diff --git a/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/Constructors.scala b/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/Constructors.scala
index 98309091..3505899f 100644
--- a/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/Constructors.scala
+++ b/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/Constructors.scala
@@ -280,30 +280,23 @@ case class ST_GeomFromGeoJSON(inputExpressions: Seq[Expression])
 /**
   * Return a Point from X and Y
   *
-  * @param inputExpressions This function takes 3 parameter which are point x, y and z.
+  * @param inputExpressions This function takes 2 parameters, which are the point's x and y values.
   */
 case class ST_Point(inputExpressions: Seq[Expression])
-  extends Expression with FoldableExpression with ImplicitCastInputTypes with CodegenFallback with UserDataGeneratator {
-
-  override def nullable: Boolean = false
+  extends InferredBinaryExpression(Constructors.point) with FoldableExpression {
 
-  override def eval(inputRow: InternalRow): Any = {
-    val x = inputExpressions(0).eval(inputRow).asInstanceOf[Double]
-    val y = inputExpressions(1).eval(inputRow).asInstanceOf[Double]
-    val coord = inputExpressions(2).eval(inputRow) match {
-      case null => new Coordinate(x, y)
-      case z: Double => new Coordinate(x, y, z)
-    }
-    val geometryFactory = new GeometryFactory()
-    val geometry = geometryFactory.createPoint(coord)
-    GeometrySerializer.serialize(geometry)
+  protected def withNewChildrenInternal(newChildren: IndexedSeq[Expression]) = {
+    copy(inputExpressions = newChildren)
   }
+}
 
-  override def dataType: DataType = GeometryUDT
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(DoubleType, DoubleType, DoubleType)
-
-  override def children: Seq[Expression] = inputExpressions
+/**
+ * Return a Point from X, Y, Z and srid
+ *
+ * @param inputExpressions This function takes 4 parameters: the point's x, y and z values and the srid (default 0).
+ */
+case class ST_PointZ(inputExpressions: Seq[Expression])
+  extends InferredQuarternaryExpression(Constructors.pointZ) with FoldableExpression {
 
   protected def withNewChildrenInternal(newChildren: IndexedSeq[Expression]) = {
     copy(inputExpressions = newChildren)
diff --git a/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/NullSafeExpressions.scala b/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/NullSafeExpressions.scala
index 73979703..ea30ecd3 100644
--- a/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/NullSafeExpressions.scala
+++ b/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/NullSafeExpressions.scala
@@ -252,3 +252,41 @@ abstract class InferredTernaryExpression[A1: InferrableType, A2: InferrableType,
     }
   }
 }
+
+abstract class InferredQuarternaryExpression[A1: InferrableType, A2: InferrableType, A3: InferrableType, A4: InferrableType, R: InferrableType]
+(f: (A1, A2, A3, A4) => R)
+(implicit val a1Tag: TypeTag[A1], implicit val a2Tag: TypeTag[A2], implicit val a3Tag: TypeTag[A3], implicit val a4Tag: TypeTag[A4], implicit val rTag: TypeTag[R])
+  extends Expression with ImplicitCastInputTypes with CodegenFallback with Serializable {
+  import InferredTypes._
+
+  def inputExpressions: Seq[Expression]
+
+  override def children: Seq[Expression] = inputExpressions
+
+  override def toString: String = s" **${getClass.getName}**  "
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(inferSparkType[A1], inferSparkType[A2], inferSparkType[A3], inferSparkType[A4])
+
+  override def nullable: Boolean = true
+
+  override def dataType = inferSparkType[R]
+
+  lazy val extractFirst = buildExtractor[A1](inputExpressions(0))
+  lazy val extractSecond = buildExtractor[A2](inputExpressions(1))
+  lazy val extractThird = buildExtractor[A3](inputExpressions(2))
+  lazy val extractForth = buildExtractor[A4](inputExpressions(3))
+
+  lazy val serialize = buildSerializer[R]
+
+  override def eval(input: InternalRow): Any = {
+    val first = extractFirst(input)
+    val second = extractSecond(input)
+    val third = extractThird(input)
+    val forth = extractForth(input)
+    if (first != null && second != null && third != null && forth != null) {
+      serialize(f(first, second, third, forth))
+    } else {
+      null
+    }
+  }
+}
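
InferredQuarternaryExpression evaluates all four children and returns null as soon as any of them is null, which is why the `ST_PointZ(null, null, null)` test further down yields a row with a null geometry rather than throwing. A small sketch of that observable behaviour, assuming a Sedona-enabled session:

```scala
// Any null argument short-circuits to SQL NULL, mirroring the eval() above,
// which only calls the underlying function when all four values are non-null.
val nullDf = sparkSession.sql("SELECT ST_PointZ(1.0, 2.0, CAST(NULL AS DOUBLE)) AS geom")
assert(nullDf.count() == 1)        // the row itself survives
assert(nullDf.first().isNullAt(0)) // ...but the geometry is null
```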
diff --git a/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_constructors.scala b/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_constructors.scala
index 6f34d37b..005d112f 100644
--- a/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_constructors.scala
+++ b/sql/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_constructors.scala
@@ -57,13 +57,20 @@ object st_constructors extends DataFrameAPI {
   def ST_LineStringFromText(coords: Column, delimiter: Column): Column = wrapExpression[ST_LineStringFromText](coords, delimiter)
   def ST_LineStringFromText(coords: String, delimiter: String): Column = wrapExpression[ST_LineStringFromText](coords, delimiter)
 
-  def ST_Point(x: Column, y: Column): Column = wrapExpression[ST_Point](x, y, null)
-  def ST_Point(x: String, y: String): Column = wrapExpression[ST_Point](x, y, null)
-  def ST_Point(x: Double, y: Double): Column = wrapExpression[ST_Point](x, y, null)
-  def ST_Point(x: Column, y: Column, z: Column): Column = wrapExpression[ST_Point](x, y, z)
-  def ST_Point(x: String, y: String, z: String): Column = wrapExpression[ST_Point](x, y, z)
-  def ST_Point(x: Double, y: Double, z: Double): Column = wrapExpression[ST_Point](x, y, z)
- 
+  def ST_Point(x: Column, y: Column): Column = wrapExpression[ST_Point](x, y)
+  def ST_Point(x: String, y: String): Column = wrapExpression[ST_Point](x, y)
+  def ST_Point(x: Double, y: Double): Column = wrapExpression[ST_Point](x, y)
+
+  def ST_PointZ(x: Column, y: Column, z: Column): Column = wrapExpression[ST_PointZ](x, y, z, 0)
+  def ST_PointZ(x: String, y: String, z: String): Column = wrapExpression[ST_PointZ](x, y, z, 0)
+  def ST_PointZ(x: Double, y: Double, z: Double): Column = wrapExpression[ST_PointZ](x, y, z, 0)
+
+  def ST_PointZ(x: Column, y: Column, z: Column, srid: Column): Column = wrapExpression[ST_PointZ](x, y, z, srid)
+
+  def ST_PointZ(x: String, y: String, z: String, srid: Column): Column = wrapExpression[ST_PointZ](x, y, z, srid)
+
+  def ST_PointZ(x: Double, y: Double, z: Double, srid: Int): Column = wrapExpression[ST_PointZ](x, y, z, srid)
+
   def ST_PointFromText(coords: Column, delimiter: Column): Column = wrapExpression[ST_PointFromText](coords, delimiter)
   def ST_PointFromText(coords: String, delimiter: String): Column = wrapExpression[ST_PointFromText](coords, delimiter)
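
For the Scala DataFrame API, the new overloads can be combined much like the dataFrameAPITestScala cases below; a minimal sketch, assuming the st_functions.ST_AsText helper used elsewhere in the test suite and import paths following the package layout shown in this diff:

```scala
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.sedona_sql.expressions.st_constructors.{ST_Point, ST_PointZ}
import org.apache.spark.sql.sedona_sql.expressions.st_functions.ST_AsText

val df = sparkSession.sql("SELECT 0.0 AS x, 1.0 AS y, 2.0 AS z")
  .select(
    ST_AsText(ST_Point("x", "y")).as("pt2d"),                      // POINT (0 1)
    ST_AsText(ST_PointZ("x", "y", "z")).as("pt3d"),                // POINT Z(0 1 2), srid defaults to 0
    ST_AsText(ST_PointZ("x", "y", "z", lit(4326))).as("pt3d_srid"))
df.show()
```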
 
diff --git a/sql/src/test/scala/org/apache/sedona/sql/constructorTestScala.scala b/sql/src/test/scala/org/apache/sedona/sql/constructorTestScala.scala
index 372fe9f7..69b1a166 100644
--- a/sql/src/test/scala/org/apache/sedona/sql/constructorTestScala.scala
+++ b/sql/src/test/scala/org/apache/sedona/sql/constructorTestScala.scala
@@ -44,8 +44,18 @@ class constructorTestScala extends TestBaseScala {
       assert(pointDf.count() == 1)
     }
 
-    it("Passed ST_Point 3D") {
-      val pointDf = sparkSession.sql("SELECT ST_Point(1.2345, 2.3456, 3.4567)")
+    it("Passed ST_Point null safety") {
+      val pointDf = sparkSession.sql("SELECT ST_Point(null, null)")
+      assert(pointDf.count() == 1)
+    }
+
+    it("Passed ST_PointZ") {
+      val pointDf = sparkSession.sql("SELECT ST_PointZ(1.2345, 2.3456, 3.4567)")
+      assert(pointDf.count() == 1)
+    }
+
+    it("Passed ST_PointZ null safety") {
+      val pointDf = sparkSession.sql("SELECT ST_PointZ(null, null, null)")
       assert(pointDf.count() == 1)
     }
 
diff --git a/sql/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala b/sql/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
index 2ae62491..d77c9f0b 100644
--- a/sql/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
+++ b/sql/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
@@ -40,6 +40,13 @@ class dataFrameAPITestScala extends TestBaseScala {
       assert(actualResult == expectedResult)
     }
 
+    it("passed st_pointz") {
+      val df = sparkSession.sql("SELECT 0.0 AS x, 1.0 AS y, 2.0 AS z").select(ST_AsText(ST_PointZ("x", "y", "z")))
+      val actualResult = df.take(1)(0).get(0).asInstanceOf[String]
+      val expectedResult = "POINT Z(0 1 2)"
+      assert(actualResult == expectedResult)
+    }
+
     it("passed st_pointfromtext") {
       val df = sparkSession.sql("SELECT '0.0,1.0' AS c").select(ST_PointFromText($"c", lit(',')))
       val actualResult = df.take(1)(0).get(0).asInstanceOf[Geometry].toText()
@@ -229,7 +236,7 @@ class dataFrameAPITestScala extends TestBaseScala {
     }
 
     it("Passed ST_3DDistance") {
-      val pointDf = sparkSession.sql("SELECT ST_Point(0.0, 0.0, 0.0) AS a, ST_Point(3.0, 0.0, 4.0) as b")
+      val pointDf = sparkSession.sql("SELECT ST_PointZ(0.0, 0.0, 0.0) AS a, ST_PointZ(3.0, 0.0, 4.0) as b")
       val df = pointDf.select(ST_3DDistance("a", "b"))
       val actualResult = df.take(1)(0).get(0).asInstanceOf[Double]
       val expectedResult = 5.0
@@ -419,7 +426,7 @@ class dataFrameAPITestScala extends TestBaseScala {
     }
 
     it("Should pass ST_X") {
-      val baseDf = sparkSession.sql("SELECT ST_Point(0.0, 1.0, 2.0) AS geom")
+      val baseDf = sparkSession.sql("SELECT ST_PointZ(0.0, 1.0, 2.0) AS geom")
       val df = baseDf.select(ST_X("geom"))
       val actualResult = df.take(1)(0).getDouble(0)
       val expectedResult = 0.0
@@ -427,7 +434,7 @@ class dataFrameAPITestScala extends TestBaseScala {
     }
 
     it("Should pass ST_Y") {
-      val baseDf = sparkSession.sql("SELECT ST_Point(0.0, 1.0, 2.0) AS geom")
+      val baseDf = sparkSession.sql("SELECT ST_PointZ(0.0, 1.0, 2.0) AS geom")
       val df = baseDf.select(ST_Y("geom"))
       val actualResult = df.take(1)(0).getDouble(0)
       val expectedResult = 1.0
@@ -435,7 +442,7 @@ class dataFrameAPITestScala extends TestBaseScala {
     }
 
     it("Should pass ST_Z") {
-      val baseDf = sparkSession.sql("SELECT ST_Point(0.0, 1.0, 2.0) AS geom")
+      val baseDf = sparkSession.sql("SELECT ST_PointZ(0.0, 1.0, 2.0) AS geom")
       val df = baseDf.select(ST_Z("geom"))
       val actualResult = df.take(1)(0).getDouble(0)
       val expectedResult = 2.0
@@ -692,7 +699,7 @@ class dataFrameAPITestScala extends TestBaseScala {
     }
 
     it ("Passed ST_Force_2D") {
-      val baseDf = sparkSession.sql("SELECT ST_Point(0.0, 0.0, 1.0) AS point")
+      val baseDf = sparkSession.sql("SELECT ST_PointZ(0.0, 0.0, 1.0) AS point")
       val df = baseDf.select(ST_Force_2D("point"))
       val actualResult = df.take(1)(0).get(0).asInstanceOf[Geometry].toText()
       val expectedResult = "POINT (0 0)"