Posted to issues@spark.apache.org by "Apache Spark (JIRA)" <ji...@apache.org> on 2018/01/12 14:58:00 UTC

[jira] [Commented] (SPARK-23057) SET LOCATION should change the path of partition in table

    [ https://issues.apache.org/jira/browse/SPARK-23057?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16324074#comment-16324074 ] 

Apache Spark commented on SPARK-23057:
--------------------------------------

User 'xubo245' has created a pull request for this issue:
https://github.com/apache/spark/pull/20249

> SET LOCATION should change the path of partition in table
> ---------------------------------------------------------
>
>                 Key: SPARK-23057
>                 URL: https://issues.apache.org/jira/browse/SPARK-23057
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 2.2.1
>            Reporter: xubo245
>            Priority: Minor
>
> According to https://issues.apache.org/jira/browse/SPARK-19235  and https://github.com/apache/spark/pull/16592#pullrequestreview-88085571, 
> {code:java}
> When porting these test cases, a bug in SET LOCATION was found: the path is not set when the location is changed.
> {code}
> The corresponding check is commented out in org.apache.spark.sql.execution.command.DDLSuite#testSetLocation:
> {code:java}
>  // TODO(gatorsmile): fix the bug in alter table set location.
>       // if (isUsingHiveMetastore) {
>       //  assert(storageFormat.properties.get("path") === expected)
>       // }
> {code}
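> For reference, a minimal sketch of how that check could read once re-enabled (this is not the committed code; storageFormat and expected are assumed to be in scope as in testSetLocation):
> {code:java}
> // Hypothetical re-enabled form of the TODO above.
> if (isUsingHiveMetastore) {
>   // properties.get("path") returns an Option[String]; after SET LOCATION it
>   // should point at the new location, which is what currently does not happen.
>   assert(storageFormat.properties.get("path").contains(expected.toString))
> }
> {code}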
> So I tested it, and there is an error at:
> {code:java}
>     // set table partition location
>     sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='2') SET LOCATION '/path/to/part/ways'")
>     verifyLocation(new URI("/path/to/part/ways"), Some(partSpec))
> {code}
> I also added a test case:
> {code:java}
>   test("SET LOCATION should change the path of partition in table") {
>     withTable("boxes") {
>       sql("CREATE TABLE boxes (height INT, length INT) PARTITIONED BY (width INT) LOCATION '/new'")
>       sql("INSERT OVERWRITE TABLE boxes PARTITION (width=4) SELECT 4, 4")
>       val expected = "/path/to/part/ways"
>       sql(s"ALTER TABLE boxes PARTITION (width=4) SET LOCATION '$expected'")
>       val catalog = spark.sessionState.catalog
>       val partSpec = Map("width" -> "4")
>       val spec = Some(partSpec)
>       val tableIdent = TableIdentifier("boxes", Some("default"))
>       val storageFormat = spec
>         .map { s => catalog.getPartition(tableIdent, s).storage }
>         .getOrElse {
>           catalog.getTableMetadata(tableIdent).storage
>         }
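>       // Note: this .get throws the java.util.NoSuchElementException: None.get shown
>       // in the error below, because properties.get("path") returns None here
>       // (the "path" property is not set for the partition after SET LOCATION).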
>       assert(storageFormat.properties.get("path").get === expected)
>     }
>   }
> {code}
> Error:
> {code:java}
> 05:46:48.213 WARN org.apache.hadoop.hive.metastore.ObjectStore: Failed to get database global_temp, returning NoSuchObjectException
> None.get
> java.util.NoSuchElementException: None.get
> 	at scala.None$.get(Option.scala:347)
> 	at scala.None$.get(Option.scala:345)
> 	at org.apache.spark.sql.hive.execution.HiveDDLSuite$$anonfun$32$$anonfun$apply$mcV$sp$22.apply$mcV$sp(HiveDDLSuite.scala:768)
> 	at org.apache.spark.sql.test.SQLTestUtilsBase$class.withTable(SQLTestUtils.scala:273)
> 	at org.apache.spark.sql.hive.execution.HiveDDLSuite.withTable(HiveDDLSuite.scala:261)
> 	at org.apache.spark.sql.hive.execution.HiveDDLSuite$$anonfun$32.apply$mcV$sp(HiveDDLSuite.scala:754)
> 	at org.apache.spark.sql.hive.execution.HiveDDLSuite$$anonfun$32.apply(HiveDDLSuite.scala:754)
> 	at org.apache.spark.sql.hive.execution.HiveDDLSuite$$anonfun$32.apply(HiveDDLSuite.scala:754)
> 	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
> 	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> 	at org.scalatest.Transformer.apply(Transformer.scala:22)
> 	at org.scalatest.Transformer.apply(Transformer.scala:20)
> 	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
> 	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
> 	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
> 	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
> 	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
> 	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
> 	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
> 	at org.apache.spark.sql.hive.execution.HiveDDLSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(HiveDDLSuite.scala:261)
> 	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:221)
> 	at org.apache.spark.sql.hive.execution.HiveDDLSuite.runTest(HiveDDLSuite.scala:261)
> 	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
> 	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
> 	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
> 	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
> 	at scala.collection.immutable.List.foreach(List.scala:381)
> 	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
> 	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
> 	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
> 	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
> 	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
> 	at org.scalatest.Suite$class.run(Suite.scala:1147)
> 	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
> 	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
> 	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
> 	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
> 	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
> 	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
> 	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
> 	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
> 	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
> 	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
> 	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340)
> 	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334)
> 	at scala.collection.immutable.List.foreach(List.scala:381)
> 	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334)
> 	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011)
> 	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010)
> 	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500)
> 	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
> 	at org.scalatest.tools.Runner$.run(Runner.scala:850)
> 	at org.scalatest.tools.Runner.run(Runner.scala)
> 	at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:138)
> 	at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
> {code}
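> For reference, a rough spark-shell sketch (not part of the test suite) of how to inspect the two places the partition location is tracked after the ALTER TABLE above; the table and partition are the ones from the test case, and the "path" property is the one the assertion reads:
> {code:java}
> // Rough sketch: compare the partition's locationUri with its "path" serde property
> // after ALTER TABLE boxes PARTITION (width=4) SET LOCATION '/path/to/part/ways'.
> import org.apache.spark.sql.catalyst.TableIdentifier
>
> val catalog = spark.sessionState.catalog
> val ident = TableIdentifier("boxes", Some("default"))
> val partStorage = catalog.getPartition(ident, Map("width" -> "4")).storage
>
> // The location URI of the partition storage.
> println(partStorage.locationUri)
> // The "path" property the assertion reads; None here is what triggers None.get.
> println(partStorage.properties.get("path"))
> {code}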


