Posted to issues@spark.apache.org by "Hyukjin Kwon (JIRA)" <ji...@apache.org> on 2016/11/03 01:20:58 UTC

[jira] [Commented] (SPARK-14840) Cannot drop a table which has the name starting with 'or'

    [ https://issues.apache.org/jira/browse/SPARK-14840?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15631134#comment-15631134 ] 

Hyukjin Kwon commented on SPARK-14840:
--------------------------------------

Please revert my action if this is inappropriate.

> Cannot drop a table which has the name starting with 'or'
> ---------------------------------------------------------
>
>                 Key: SPARK-14840
>                 URL: https://issues.apache.org/jira/browse/SPARK-14840
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 1.6.2
>            Reporter: Kwangwoo Kim
>
> sqlContext.sql("drop table tmp.order")
> The above code produces the following error:
> 16/04/22 14:27:17 INFO ParseDriver: Parsing command: drop table tmp.order
> 16/04/22 14:27:19 INFO ParseDriver: Parse Completed
> 16/04/22 14:27:19 WARN DropTable: [1.5] failure: identifier expected
> tmp.order
>     ^
> java.lang.RuntimeException: [1.5] failure: identifier expected
> tmp.order
>     ^
> 	at scala.sys.package$.error(package.scala:27)
> 	at org.apache.spark.sql.catalyst.SqlParser$.parseTableIdentifier(SqlParser.scala:58)
> 	at org.apache.spark.sql.SQLContext.table(SQLContext.scala:827)
> 	at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:62)
> 	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58)
> 	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56)
> 	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
> 	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
> 	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
> 	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
> 	at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
> 	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817)
> 	at $line15.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:26)
> 	at $line15.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
> 	at $line15.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:33)
> 	at $line15.$read$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:35)
> 	at $line15.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:37)
> 	at $line15.$read$$iwC$$iwC$$iwC.<init>(<console>:39)
> 	at $line15.$read$$iwC$$iwC.<init>(<console>:41)
> 	at $line15.$read$$iwC.<init>(<console>:43)
> 	at $line15.$read.<init>(<console>:45)
> 	at $line15.$read$.<init>(<console>:49)
> 	at $line15.$read$.<clinit>(<console>)
> 	at $line15.$eval$.<init>(<console>:7)
> 	at $line15.$eval$.<clinit>(<console>)
> 	at $line15.$eval.$print(<console>)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:497)
> 	at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
> 	at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
> 	at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
> 	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
> 	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
> 	at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
> 	at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
> 	at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
> 	at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
> 	at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
> 	at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
> 	at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
> 	at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
> 	at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
> 	at org.apache.spark.repl.Main$.main(Main.scala:31)
> 	at org.apache.spark.repl.Main.main(Main.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:497)
> 	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
> 	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
> 	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
> 	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
> 	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> java.lang.RuntimeException: [1.5] failure: identifier expected
> tmp.order
>     ^
> 	at scala.sys.package$.error(package.scala:27)
> 	at org.apache.spark.sql.catalyst.SqlParser$.parseTableIdentifier(SqlParser.scala:58)
> 	at org.apache.spark.sql.hive.HiveContext.invalidateTable(HiveContext.scala:351)
> 	at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:72)
> 	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58)
> 	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56)
> 	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
> 	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
> 	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
> 	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
> 	at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
> 	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817)
> 	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:26)
> 	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
> 	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:33)
> 	at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:35)
> 	at $iwC$$iwC$$iwC$$iwC.<init>(<console>:37)
> 	at $iwC$$iwC$$iwC.<init>(<console>:39)
> 	at $iwC$$iwC.<init>(<console>:41)
> 	at $iwC.<init>(<console>:43)
> 	at <init>(<console>:45)
> 	at .<init>(<console>:49)
> 	at .<clinit>(<console>)
> 	at .<init>(<console>:7)
> 	at .<clinit>(<console>)
> 	at $print(<console>)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:497)
> 	at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
> 	at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
> 	at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
> 	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
> 	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
> 	at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
> 	at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
> 	at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
> 	at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
> 	at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
> 	at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
> 	at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
> 	at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
> 	at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
> 	at org.apache.spark.repl.Main$.main(Main.scala:31)
> 	at org.apache.spark.repl.Main.main(Main.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:497)
> 	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
> 	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
> 	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
> 	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
> 	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
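For anyone hitting this on 1.6.x: per the frames above, the failure is not in the Hive DDL itself but in the Catalyst identifier parse that DropTable.run triggers first (SqlParser.parseTableIdentifier, reached via SQLContext.table and HiveContext.invalidateTable). Below is a minimal reproduction sketch from a spark-shell; the database/table names come from the report, but the sqlContext.table(...) calls are illustrative assumptions added here, not commands from the reporter.

    // Spark 1.6.x spark-shell with Hive support.
    // Assumes a table named "order" already exists in database "tmp"
    // (for example, created through Hive with a backtick-quoted name).
    sqlContext.sql("drop table tmp.order")
    // => java.lang.RuntimeException: [1.5] failure: identifier expected

    // DropTable.run resolves the table via sqlContext.table(...), which is
    // where the reserved keyword "order" trips the Catalyst parser:
    sqlContext.table("tmp.order")    // fails with the same parse error
    sqlContext.table("tmp.orders")   // a non-keyword name parses fine (assuming such a table exists)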


