You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Liu Shuo (Jira)" <ji...@apache.org> on 2023/03/09 13:49:00 UTC

[jira] [Updated] (SPARK-42734) deal with timestamp partition column when spark.sql.storeAssignmentPolicy=LEGACY

     [ https://issues.apache.org/jira/browse/SPARK-42734?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Liu Shuo updated SPARK-42734:
-----------------------------
    Description: 
We create a table that has a partition column of timestamp type; when we execute `alter table ... partition ... rename to partition ...`, it will probably fail.

We can reproduce it with the following SQL:
{code:java}
set spark.sql.storeAssignmentPolicy = LEGACY;
create table t1(c_timestamp timestamp) using parquet partitioned by (p_id int, p_timestamp timestamp); 
insert into t1 partition(p_id=1, p_timestamp='2016-08-01 11:45:15.0') values('2016-8-1 11:45:15.0');
alter table t1 partition(p_id=1, p_timestamp='2016-08-01 11:45:15.0')  rename to partition(p_id=2, p_timestamp='2016-08-01 11:45:15.0');{code}
The above case will fail with the following error.

Partition not found in table 't1' database 'default':
p_id -> 1
p_timestamp -> 2016-08-01 11:45:15.0
org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException: Partition not found in table 't1' database 'default':
p_id -> 1
p_timestamp -> 2016-08-01 11:45:15.0
    at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.$anonfun$requirePartitionsExist$1(InMemoryCatalog.scala:82)
    at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.$anonfun$requirePartitionsExist$1$adapted(InMemoryCatalog.scala:80)
    at scala.collection.immutable.List.foreach(List.scala:431)
    at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.requirePartitionsExist(InMemoryCatalog.scala:80)
    at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.renamePartitions(InMemoryCatalog.scala:494)
    at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.renamePartitions(ExternalCatalogWithListener.scala:219)
    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.renamePartitions(SessionCatalog.scala:1213)
    at org.apache.spark.sql.execution.command.AlterTableRenamePartitionCommand.run(ddl.scala:568)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:111)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:171)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488)
    at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:219)
    at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)
    at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$sql$1(SQLTestUtils.scala:232)
    at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$991(SQLQuerySuite.scala:4747)
    at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
    at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
    at org.apache.spark.sql.SQLQuerySuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(SQLQuerySuite.scala:60)
    at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:247)
    at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:245)
    at org.apache.spark.sql.SQLQuerySuite.withSQLConf(SQLQuerySuite.scala:60)
    at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$990(SQLQuerySuite.scala:4741)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1491)
    at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:306)
    at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:304)
    at org.apache.spark.sql.SQLQuerySuite.withTable(SQLQuerySuite.scala:60)
    at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$989(SQLQuerySuite.scala:4741)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
    at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
    at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
    at org.scalatest.Transformer.apply(Transformer.scala:22)
    at org.scalatest.Transformer.apply(Transformer.scala:20)
    at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
    at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:207)
    at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
    at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
    at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
    at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
    at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
    at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:66)
    at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
    at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
    at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:66)
    at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
    at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
    at scala.collection.immutable.List.foreach(List.scala:431)
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
    at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
    at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
    at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
    at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
    at org.scalatest.Suite.run(Suite.scala:1114)
    at org.scalatest.Suite.run$(Suite.scala:1096)
    at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
    at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
    at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
    at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
    at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
    at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:66)
    at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
    at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
    at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
    at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:66)
    at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:47)
    at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1321)
    at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1315)
    at scala.collection.immutable.List.foreach(List.scala:431)
    at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1315)
    at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:992)
    at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:970)
    at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1481)
    at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:970)
    at org.scalatest.tools.Runner$.run(Runner.scala:798)
    at org.scalatest.tools.Runner.run(Runner.scala)
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38)
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25)

  was:
We create a table that has a partition column of timestamp type; when we execute `alter table ... partition ... rename to partition ...`, it will probably fail.

We can reproduce it with the following SQL:
{code:java}
set spark.sql.storeAssignmentPolicy = LEGACY;
create table t1(c_timestamp timestamp) using parquet partitioned by (p_id int, p_timestamp timestamp); 
insert into t1 partition(p_id=1, p_timestamp='2016-08-01 11:45:15.0') values('2016-8-1 11:45:15.0');
alter table t1 partition(p_id=1, p_timestamp='2016-08-01 11:45:15.0')  rename to partition(p_id=2, p_timestamp='2016-08-01 11:45:15.0');{code}
Partition not found in table 't1' database 'default':
p_id -> 1
p_timestamp -> 2016-08-01 11:45:15.0
org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException: Partition not found in table 't1' database 'default':
p_id -> 1
p_timestamp -> 2016-08-01 11:45:15.0
    at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.$anonfun$requirePartitionsExist$1(InMemoryCatalog.scala:82)
    at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.$anonfun$requirePartitionsExist$1$adapted(InMemoryCatalog.scala:80)
    at scala.collection.immutable.List.foreach(List.scala:431)
    at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.requirePartitionsExist(InMemoryCatalog.scala:80)
    at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.renamePartitions(InMemoryCatalog.scala:494)
    at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.renamePartitions(ExternalCatalogWithListener.scala:219)
    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.renamePartitions(SessionCatalog.scala:1213)
    at org.apache.spark.sql.execution.command.AlterTableRenamePartitionCommand.run(ddl.scala:568)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:111)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:171)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488)
    at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:219)
    at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)
    at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$sql$1(SQLTestUtils.scala:232)
    at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$991(SQLQuerySuite.scala:4747)
    at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
    at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
    at org.apache.spark.sql.SQLQuerySuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(SQLQuerySuite.scala:60)
    at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:247)
    at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:245)
    at org.apache.spark.sql.SQLQuerySuite.withSQLConf(SQLQuerySuite.scala:60)
    at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$990(SQLQuerySuite.scala:4741)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1491)
    at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:306)
    at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:304)
    at org.apache.spark.sql.SQLQuerySuite.withTable(SQLQuerySuite.scala:60)
    at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$989(SQLQuerySuite.scala:4741)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
    at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
    at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
    at org.scalatest.Transformer.apply(Transformer.scala:22)
    at org.scalatest.Transformer.apply(Transformer.scala:20)
    at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
    at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:207)
    at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
    at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
    at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
    at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
    at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
    at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:66)
    at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
    at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
    at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:66)
    at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
    at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
    at scala.collection.immutable.List.foreach(List.scala:431)
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
    at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
    at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
    at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
    at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
    at org.scalatest.Suite.run(Suite.scala:1114)
    at org.scalatest.Suite.run$(Suite.scala:1096)
    at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
    at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
    at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
    at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
    at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
    at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:66)
    at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
    at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
    at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
    at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:66)
    at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:47)
    at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1321)
    at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1315)
    at scala.collection.immutable.List.foreach(List.scala:431)
    at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1315)
    at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:992)
    at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:970)
    at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1481)
    at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:970)
    at org.scalatest.tools.Runner$.run(Runner.scala:798)
    at org.scalatest.tools.Runner.run(Runner.scala)
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38)
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25)


> deal with timestamp partition column when spark.sql.storeAssignmentPolicy=LEGACY
> --------------------------------------------------------------------------------
>
>                 Key: SPARK-42734
>                 URL: https://issues.apache.org/jira/browse/SPARK-42734
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 3.1.0, 3.3.1, 3.4.0
>            Reporter: Liu Shuo
>            Priority: Major
>
> We create a table that has a partition column of timestamp type; when we execute `alter table ... partition ... rename to partition ...`, it will probably fail.
> We can reproduce it with the following SQL:
> {code:java}
> set spark.sql.storeAssignmentPolicy = LEGACY;
> create table t1(c_timestamp timestamp) using parquet partitioned by (p_id int, p_timestamp timestamp); 
> insert into t1 partition(p_id=1, p_timestamp='2016-08-01 11:45:15.0') values('2016-8-1 11:45:15.0');
> alter table t1 partition(p_id=1, p_timestamp='2016-08-01 11:45:15.0')  rename to partition(p_id=2, p_timestamp='2016-08-01 11:45:15.0');{code}
> The above case will fail with the following error.
> Partition not found in table 't1' database 'default':
> p_id -> 1
> p_timestamp -> 2016-08-01 11:45:15.0
> org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException: Partition not found in table 't1' database 'default':
> p_id -> 1
> p_timestamp -> 2016-08-01 11:45:15.0
>     at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.$anonfun$requirePartitionsExist$1(InMemoryCatalog.scala:82)
>     at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.$anonfun$requirePartitionsExist$1$adapted(InMemoryCatalog.scala:80)
>     at scala.collection.immutable.List.foreach(List.scala:431)
>     at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.requirePartitionsExist(InMemoryCatalog.scala:80)
>     at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.renamePartitions(InMemoryCatalog.scala:494)
>     at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.renamePartitions(ExternalCatalogWithListener.scala:219)
>     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.renamePartitions(SessionCatalog.scala:1213)
>     at org.apache.spark.sql.execution.command.AlterTableRenamePartitionCommand.run(ddl.scala:568)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
>     at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:111)
>     at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:171)
>     at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
>     at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
>     at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
>     at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
>     at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
>     at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
>     at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
>     at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488)
>     at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
>     at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
>     at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
>     at org.apache.spark.sql.Dataset.<init>(Dataset.scala:219)
>     at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
>     at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
>     at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)
>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
>     at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)
>     at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$sql$1(SQLTestUtils.scala:232)
>     at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$991(SQLQuerySuite.scala:4747)
>     at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
>     at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
>     at org.apache.spark.sql.SQLQuerySuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(SQLQuerySuite.scala:60)
>     at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:247)
>     at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:245)
>     at org.apache.spark.sql.SQLQuerySuite.withSQLConf(SQLQuerySuite.scala:60)
>     at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$990(SQLQuerySuite.scala:4741)
>     at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>     at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1491)
>     at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:306)
>     at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:304)
>     at org.apache.spark.sql.SQLQuerySuite.withTable(SQLQuerySuite.scala:60)
>     at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$989(SQLQuerySuite.scala:4741)
>     at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>     at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
>     at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
>     at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
>     at org.scalatest.Transformer.apply(Transformer.scala:22)
>     at org.scalatest.Transformer.apply(Transformer.scala:20)
>     at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
>     at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:207)
>     at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
>     at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
>     at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
>     at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
>     at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
>     at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:66)
>     at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
>     at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
>     at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:66)
>     at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
>     at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
>     at scala.collection.immutable.List.foreach(List.scala:431)
>     at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>     at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
>     at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
>     at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
>     at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
>     at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
>     at org.scalatest.Suite.run(Suite.scala:1114)
>     at org.scalatest.Suite.run$(Suite.scala:1096)
>     at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
>     at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
>     at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
>     at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
>     at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
>     at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:66)
>     at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
>     at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
>     at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
>     at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:66)
>     at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:47)
>     at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1321)
>     at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1315)
>     at scala.collection.immutable.List.foreach(List.scala:431)
>     at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1315)
>     at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:992)
>     at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:970)
>     at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1481)
>     at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:970)
>     at org.scalatest.tools.Runner$.run(Runner.scala:798)
>     at org.scalatest.tools.Runner.run(Runner.scala)
>     at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38)
>     at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25)



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org