You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Max Gekk (Jira)" <ji...@apache.org> on 2022/05/06 10:14:00 UTC

[jira] [Updated] (SPARK-36180) Support TimestampNTZ type in Hive

     [ https://issues.apache.org/jira/browse/SPARK-36180?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Max Gekk updated SPARK-36180:
-----------------------------
    Fix Version/s:     (was: 3.3.0)

> Support TimestampNTZ type in Hive
> ---------------------------------
>
>                 Key: SPARK-36180
>                 URL: https://issues.apache.org/jira/browse/SPARK-36180
>             Project: Spark
>          Issue Type: Sub-task
>          Components: SQL
>    Affects Versions: 3.3.0
>            Reporter: Kent Yao
>            Priority: Major
>
>  
> {code:java}
> [info] Caused by: java.lang.IllegalArgumentException: Error: type expected at the position 0 of 'timestamp_ntz:timestamp' but 'timestamp_ntz' is found.[info] Caused by: java.lang.IllegalArgumentException: Error: type expected at the position 0 of 'timestamp_ntz:timestamp' but 'timestamp_ntz' is found.[info]  at org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils$TypeInfoParser.expect(TypeInfoUtils.java:372)[info]  at org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils$TypeInfoParser.expect(TypeInfoUtils.java:355)[info]  at org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils$TypeInfoParser.parseType(TypeInfoUtils.java:416)[info]  at org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils$TypeInfoParser.parseTypeInfos(TypeInfoUtils.java:329)[info]  at org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getTypeInfosFromTypeString(TypeInfoUtils.java:814)[info]  at org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters.extractColumnInfo(LazySerDeParameters.java:162)[info]  at org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters.<init>(LazySerDeParameters.java:91)[info]  at org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.initialize(LazySimpleSerDe.java:116)[info]  at org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54)[info]  at org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:533)[info]  at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:453)[info]  at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:440)[info]  at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:281)[info]  at org.apache.hadoop.hive.ql.metadata.Table.checkValidity(Table.java:199)[info]  at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:842)[info]  ... 
63 more[info]   at org.apache.hive.jdbc.HiveStatement.waitForOperationToComplete(HiveStatement.java:385)[info]   at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:254)[info]   at org.apache.spark.sql.hive.thriftserver.SparkMetadataOperationSuite.$anonfun$new$145(SparkMetadataOperationSuite.scala:666)[info]   at org.apache.spark.sql.hive.thriftserver.SparkMetadataOperationSuite.$anonfun$new$145$adapted(SparkMetadataOperationSuite.scala:665)[info]   at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2TestBase.$anonfun$withMultipleConnectionJdbcStatement$4(HiveThriftServer2Suites.scala:1422)[info]   at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2TestBase.$anonfun$withMultipleConnectionJdbcStatement$4$adapted(HiveThriftServer2Suites.scala:1422)[info]   at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)[info]   at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)[info]   at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)[info]   at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2TestBase.$anonfun$withMultipleConnectionJdbcStatement$1(HiveThriftServer2Suites.scala:1422)[info]   at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2TestBase.tryCaptureSysLog(HiveThriftServer2Suites.scala:1407)[info]   at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2TestBase.withMultipleConnectionJdbcStatement(HiveThriftServer2Suites.scala:1416)[info]   at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2TestBase.withJdbcStatement(HiveThriftServer2Suites.scala:1454)[info]   at org.apache.spark.sql.hive.thriftserver.SparkMetadataOperationSuite.$anonfun$new$144(SparkMetadataOperationSuite.scala:665)[info]   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)[info]   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)[info]   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)[info]   at 
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)[info]   at org.scalatest.Transformer.apply(Transformer.scala:22)[info]   at org.scalatest.Transformer.apply(Transformer.scala:20)[info]   at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)[info]   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:190)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)[info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)[info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:62)[info]   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)[info]   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)[info]   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:62)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)[info]   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)[info]   at scala.collection.immutable.List.foreach(List.scala:431)[info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)[info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)[info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)[info]   at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)[info]   at org.scalatest.Suite.run(Suite.scala:1112)[info]   at org.scalatest.Suite.run$(Suite.scala:1094)[info]   at 
org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)[info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)[info]   at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)[info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:62)[info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:62)[info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:318)[info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:513)[info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413)[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)[info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)[info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)[info]   at java.lang.Thread.run(Thread.java:748){code}



--
This message was sent by Atlassian Jira
(v8.20.7#820007)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org