Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2019/07/01 17:06:30 UTC

[GitHub] [spark] jose-torres commented on a change in pull request #24937: [SPARK-28139][SQL] Add v2 ALTER TABLE implementation.

jose-torres commented on a change in pull request #24937: [SPARK-28139][SQL] Add v2 ALTER TABLE implementation.
URL: https://github.com/apache/spark/pull/24937#discussion_r299136980
 
 

 ##########
 File path: sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
 ##########
 @@ -344,4 +345,834 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with BeforeAn
         df_joined)
     }
   }
+
+  test("AlterTable: table does not exist") {
+    val exc = intercept[AnalysisException] {
+      sql(s"ALTER TABLE testcat.ns1.table_name DROP COLUMN id")
+    }
+
+    assert(exc.getMessage.contains("testcat.ns1.table_name"))
+    assert(exc.getMessage.contains("Table or view not found"))
+  }
+
+  test("AlterTable: change rejected by implementation") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+
+      val exc = intercept[SparkException] {
+        sql(s"ALTER TABLE $t DROP COLUMN id")
+      }
+
+      assert(exc.getMessage.contains("Unsupported table change"))
+      assert(exc.getMessage.contains("Cannot drop all fields")) // from the implementation
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType().add("id", IntegerType))
+    }
+  }
+
+  test("AlterTable: add top-level column") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN data string")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType().add("id", IntegerType).add("data", StringType))
+    }
+  }
+
+  test("AlterTable: add column with comment") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN data string COMMENT 'doc'")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == StructType(Seq(
+        StructField("id", IntegerType),
+        StructField("data", StringType).withComment("doc"))))
+    }
+  }
+
+  test("AlterTable: add multiple columns") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMNS data string COMMENT 'doc', ts timestamp")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == StructType(Seq(
+        StructField("id", IntegerType),
+        StructField("data", StringType).withComment("doc"),
+        StructField("ts", TimestampType))))
+    }
+  }
+
+  test("AlterTable: add nested column") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN point.z double")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("point", StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType),
+            StructField("z", DoubleType)))))
+    }
+  }
+
+  test("AlterTable: add nested column to map key") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points map<struct<x: double, y: double>, bigint>) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN points.key.z double")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", MapType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType),
+            StructField("z", DoubleType))), LongType)))
+    }
+  }
+
+  test("AlterTable: add nested column to map value") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points map<string, struct<x: double, y: double>>) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN points.value.z double")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", MapType(StringType, StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType),
+            StructField("z", DoubleType))))))
+    }
+  }
+
+  test("AlterTable: add nested column to array element") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN points.element.z double")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", ArrayType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType),
+            StructField("z", DoubleType))))))
+    }
+  }
+
+  test("AlterTable: add complex column") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN points array<struct<x: double, y: double>>")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", ArrayType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType))))))
+    }
+  }
+
+  test("AlterTable: add nested column with comment") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN points.element.z double COMMENT 'doc'")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", ArrayType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType),
+            StructField("z", DoubleType).withComment("doc"))))))
+    }
+  }
+
+  test("AlterTable: add nested column parent must exist") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ADD COLUMN point.z double")
+      }
+
+      assert(exc.getMessage.contains("point"))
+      assert(exc.getMessage.contains("missing field"))
+    }
+  }
+
+  test("AlterTable: update column type int -> long") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN id TYPE bigint")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType().add("id", LongType))
+    }
+  }
+
+  test("AlterTable: update nested type float -> double") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, point struct<x: float, y: double>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN point.x TYPE double")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("point", StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType)))))
+    }
+  }
+
+  test("AlterTable: update column with struct type fails") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN point TYPE struct<x: double, y: double, z: double>")
+      }
+
+      assert(exc.getMessage.contains("point"))
+      assert(exc.getMessage.contains("update a struct by adding, deleting, or updating its fields"))
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("point", StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType)))))
+    }
+  }
+
+  test("AlterTable: update column with array type fails") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points array<int>) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN points TYPE array<long>")
+      }
+
+      assert(exc.getMessage.contains("update the element by updating points.element"))
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", ArrayType(IntegerType)))
+    }
+  }
+
+  test("AlterTable: update column array element type") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points array<int>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN points.element TYPE long")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", ArrayType(LongType)))
+    }
+  }
+
+  test("AlterTable: update column with map type fails") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, m map<string, int>) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN m TYPE map<string, long>")
+      }
+
+      assert(exc.getMessage.contains("update a map by updating m.key or m.value"))
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("m", MapType(StringType, IntegerType)))
+    }
+  }
+
+  test("AlterTable: update column map value type") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, m map<string, int>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN m.value TYPE long")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("m", MapType(StringType, LongType)))
+    }
+  }
+
+  test("AlterTable: update nested type in map key") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points map<struct<x: float, y: double>, bigint>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN points.key.x TYPE double")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", MapType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType))), LongType)))
+    }
+  }
+
+  test("AlterTable: update nested type in map value") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points map<string, struct<x: float, y: double>>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN points.value.x TYPE double")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", MapType(StringType, StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType))))))
+    }
+  }
+
+  test("AlterTable: update nested type in array") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points array<struct<x: float, y: double>>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN points.element.x TYPE double")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", ArrayType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType))))))
+    }
+  }
+
+  test("AlterTable: update column must exist") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN data TYPE string")
+      }
+
+      assert(exc.getMessage.contains("data"))
+      assert(exc.getMessage.contains("missing field"))
+    }
+  }
+
+  test("AlterTable: nested update column must exist") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN point.x TYPE double")
+      }
+
+      assert(exc.getMessage.contains("point.x"))
+      assert(exc.getMessage.contains("missing field"))
+    }
+  }
+
+  test("AlterTable: update column type must be compatible") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN id TYPE boolean")
+      }
+
+      assert(exc.getMessage.contains("id"))
+      assert(exc.getMessage.contains("int cannot be cast to boolean"))
+    }
+  }
+
+  test("AlterTable: update column comment") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN id COMMENT 'doc'")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == StructType(Seq(StructField("id", IntegerType).withComment("doc"))))
+    }
+  }
+
+  test("AlterTable: update column type and comment") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN id TYPE bigint COMMENT 'doc'")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == StructType(Seq(StructField("id", LongType).withComment("doc"))))
+    }
+  }
+
+  test("AlterTable: update nested column comment") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN point.y COMMENT 'doc'")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("point", StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType).withComment("doc")))))
+    }
+  }
+
+  test("AlterTable: update nested column comment in map key") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points map<struct<x: double, y: double>, bigint>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN points.key.y COMMENT 'doc'")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", MapType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType).withComment("doc"))), LongType)))
+    }
+  }
+
+  test("AlterTable: update nested column comment in map value") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points map<string, struct<x: double, y: double>>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN points.value.y COMMENT 'doc'")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", MapType(StringType, StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType).withComment("doc"))))))
+    }
+  }
+
+  test("AlterTable: update nested column comment in array") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN points.element.y COMMENT 'doc'")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("points", ArrayType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("y", DoubleType).withComment("doc"))))))
+    }
+  }
+
+  test("AlterTable: comment update column must exist") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN data COMMENT 'doc'")
+      }
+
+      assert(exc.getMessage.contains("data"))
+      assert(exc.getMessage.contains("missing field"))
+    }
+  }
+
+  test("AlterTable: nested comment update column must exist") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN point.x COMMENT 'doc'")
+      }
+
+      assert(exc.getMessage.contains("point.x"))
+      assert(exc.getMessage.contains("missing field"))
+    }
+  }
+
+  test("AlterTable: rename column") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t RENAME COLUMN id TO user_id")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType().add("user_id", IntegerType))
+    }
+  }
+
+  test("AlterTable: rename nested column") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING foo")
+      sql(s"ALTER TABLE $t RENAME COLUMN point.y TO t")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("point", StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("t", DoubleType)))))
+    }
+  }
+
+  test("AlterTable: rename nested column in map key") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, point map<struct<x: double, y: double>, bigint>) USING foo")
+      sql(s"ALTER TABLE $t RENAME COLUMN point.key.y TO t")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+          .add("id", IntegerType)
+          .add("point", MapType(StructType(Seq(
+            StructField("x", DoubleType),
+            StructField("t", DoubleType))), LongType)))
+    }
+  }
+
+  test("AlterTable: rename nested column in map value") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points map<string, struct<x: double, y: double>>) USING foo")
+      sql(s"ALTER TABLE $t RENAME COLUMN points.value.y TO t")
 
 Review comment:
   General question more related to the parser than here: are there tests ensuring that something dangerous doesn't happen if you try to rename/alter/whatever a special identifier like "points.value" directly?
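  To make the concern concrete, a sketch of the kind of guard-rail test I have in mind, in the style of this suite. This is purely hypothetical: the dotted column name, the backtick-quoting behavior, and the expected outcome are my assumptions, not anything this PR asserts.

      test("AlterTable: quoted column name is not treated as a nested path") {
        val t = "testcat.ns1.table_name"
        withTable(t) {
          // Hypothetical setup: a top-level column whose name contains a dot,
          // next to a real map column, so that "points.value" is ambiguous
          // between the quoted column and the map's value.
          sql(s"CREATE TABLE $t (`points.value` int, points map<string, int>) USING foo")

          // If the parser keeps backticked identifiers atomic, this should
          // update the top-level column and leave the map's value type alone.
          // Whether it actually does is exactly the open question.
          sql(s"ALTER TABLE $t ALTER COLUMN `points.value` TYPE bigint")

          val testCatalog = spark.catalog("testcat").asTableCatalog
          val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))

          assert(table.schema == new StructType()
              .add("points.value", LongType)
              .add("points", MapType(StringType, IntegerType)))
        }
      }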

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 