Posted to issues@spark.apache.org by "Sean Owen (JIRA)" <ji...@apache.org> on 2015/12/09 20:49:11 UTC

[jira] [Resolved] (SPARK-11824) WebUI throws console error for descriptions with 'bad' HTML

     [ https://issues.apache.org/jira/browse/SPARK-11824?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Sean Owen resolved SPARK-11824.
-------------------------------
       Resolution: Fixed
    Fix Version/s: 1.6.1
                   2.0.0

Issue resolved by pull request 10159
[https://github.com/apache/spark/pull/10159]

> WebUI throws console error for descriptions with 'bad' HTML
> -----------------------------------------------------------
>
>                 Key: SPARK-11824
>                 URL: https://issues.apache.org/jira/browse/SPARK-11824
>             Project: Spark
>          Issue Type: Improvement
>          Components: SQL, Web UI
>    Affects Versions: 1.5.2
>         Environment: RHEL 6, Java 1.7, Mesos 0.25.0
>            Reporter: Andy Robb
>            Priority: Minor
>              Labels: starter
>             Fix For: 2.0.0, 1.6.1
>
>
> When a query run through the Spark SQL CLI contains less-than or greater-than symbols, viewing the Web UI logs the following console warning. (The table and column names have been changed from the actual query.)
> This occurs across CLI invocations. The warning is logged each time the UI is refreshed, both while the query is executing and after it completes.
> {noformat}
> 15/11/18 10:45:31 WARN ui.UIUtils: Invalid job description: select count(1) from table1 where date >= '2015-11-01' and date <= '2015-11-15' 
> org.xml.sax.SAXParseException; lineNumber: 1; columnNumber: 114; The content of elements must consist of well-formed character data or markup.
> 	at com.sun.org.apache.xerces.internal.util.ErrorHandlerWrapper.createSAXParseException(ErrorHandlerWrapper.java:198)
> 	at com.sun.org.apache.xerces.internal.util.ErrorHandlerWrapper.fatalError(ErrorHandlerWrapper.java:177)
> 	at com.sun.org.apache.xerces.internal.impl.XMLErrorReporter.reportError(XMLErrorReporter.java:441)
> 	at com.sun.org.apache.xerces.internal.impl.XMLErrorReporter.reportError(XMLErrorReporter.java:368)
> 	at com.sun.org.apache.xerces.internal.impl.XMLScanner.reportFatalError(XMLScanner.java:1436)
> 	at com.sun.org.apache.xerces.internal.impl.XMLDocumentFragmentScannerImpl$FragmentContentDriver.startOfMarkup(XMLDocumentFragmentScannerImpl.java:2636)
> 	at com.sun.org.apache.xerces.internal.impl.XMLDocumentFragmentScannerImpl$FragmentContentDriver.next(XMLDocumentFragmentScannerImpl.java:2734)
> 	at com.sun.org.apache.xerces.internal.impl.XMLDocumentScannerImpl.next(XMLDocumentScannerImpl.java:606)
> 	at com.sun.org.apache.xerces.internal.impl.XMLDocumentFragmentScannerImpl.scanDocument(XMLDocumentFragmentScannerImpl.java:510)
> 	at com.sun.org.apache.xerces.internal.parsers.XML11Configuration.parse(XML11Configuration.java:848)
> 	at com.sun.org.apache.xerces.internal.parsers.XML11Configuration.parse(XML11Configuration.java:777)
> 	at com.sun.org.apache.xerces.internal.parsers.XMLParser.parse(XMLParser.java:141)
> 	at com.sun.org.apache.xerces.internal.parsers.AbstractSAXParser.parse(AbstractSAXParser.java:1213)
> 	at com.sun.org.apache.xerces.internal.jaxp.SAXParserImpl$JAXPSAXParser.parse(SAXParserImpl.java:648)
> 	at com.sun.org.apache.xerces.internal.jaxp.SAXParserImpl.parse(SAXParserImpl.java:332)
> 	at scala.xml.factory.XMLLoader$class.loadXML(XMLLoader.scala:40)
> 	at scala.xml.XML$.loadXML(XML.scala:57)
> 	at scala.xml.factory.XMLLoader$class.loadString(XMLLoader.scala:59)
> 	at scala.xml.XML$.loadString(XML.scala:57)
> 	at org.apache.spark.ui.UIUtils$.makeDescription(UIUtils.scala:417)
> 	at org.apache.spark.ui.jobs.StageTableBase$$anonfun$5$$anonfun$apply$1.apply(StageTable.scala:118)
> 	at org.apache.spark.ui.jobs.StageTableBase$$anonfun$5$$anonfun$apply$1.apply(StageTable.scala:116)
> 	at scala.Option.map(Option.scala:145)
> 	at org.apache.spark.ui.jobs.StageTableBase$$anonfun$5.apply(StageTable.scala:116)
> 	at org.apache.spark.ui.jobs.StageTableBase$$anonfun$5.apply(StageTable.scala:115)
> 	at scala.Option.flatMap(Option.scala:170)
> 	at org.apache.spark.ui.jobs.StageTableBase.makeDescription(StageTable.scala:115)
> 	at org.apache.spark.ui.jobs.StageTableBase.stageRow(StageTable.scala:177)
> 	at org.apache.spark.ui.jobs.StageTableBase.org$apache$spark$ui$jobs$StageTableBase$$renderStageRow(StageTable.scala:195)
> 	at org.apache.spark.ui.jobs.StageTableBase$$anonfun$toNodeSeq$1.apply(StageTable.scala:60)
> 	at org.apache.spark.ui.jobs.StageTableBase$$anonfun$toNodeSeq$1.apply(StageTable.scala:60)
> 	at org.apache.spark.ui.jobs.StageTableBase$$anonfun$stageTable$1.apply(StageTable.scala:69)
> 	at org.apache.spark.ui.jobs.StageTableBase$$anonfun$stageTable$1.apply(StageTable.scala:69)
> 	at scala.collection.immutable.Stream.map(Stream.scala:376)
> 	at org.apache.spark.ui.jobs.StageTableBase.stageTable(StageTable.scala:69)
> 	at org.apache.spark.ui.jobs.StageTableBase.toNodeSeq(StageTable.scala:60)
> 	at org.apache.spark.ui.jobs.AllStagesPage.render(AllStagesPage.scala:121)
> 	at org.apache.spark.ui.WebUI$$anonfun$2.apply(WebUI.scala:79)
> 	at org.apache.spark.ui.WebUI$$anonfun$2.apply(WebUI.scala:79)
> 	at org.apache.spark.ui.JettyUtils$$anon$1.doGet(JettyUtils.scala:69)
> 	at javax.servlet.http.HttpServlet.service(HttpServlet.java:735)
> 	at javax.servlet.http.HttpServlet.service(HttpServlet.java:848)
> 	at org.spark-project.jetty.servlet.ServletHolder.handle(ServletHolder.java:684)
> 	at org.spark-project.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501)
> 	at org.spark-project.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086)
> 	at org.spark-project.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428)
> 	at org.spark-project.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020)
> 	at org.spark-project.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135)
> 	at org.spark-project.jetty.server.handler.GzipHandler.handle(GzipHandler.java:264)
> 	at org.spark-project.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:255)
> 	at org.spark-project.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116)
> 	at org.spark-project.jetty.server.Server.handle(Server.java:370)
> 	at org.spark-project.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:494)
> 	at org.spark-project.jetty.server.AbstractHttpConnection.headerComplete(AbstractHttpConnection.java:971)
> 	at org.spark-project.jetty.server.AbstractHttpConnection$RequestHandler.headerComplete(AbstractHttpConnection.java:1033)
> 	at org.spark-project.jetty.http.HttpParser.parseNext(HttpParser.java:644)
> 	at org.spark-project.jetty.http.HttpParser.parseAvailable(HttpParser.java:235)
> 	at org.spark-project.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82)
> 	at org.spark-project.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:667)
> 	at org.spark-project.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52)
> 	at org.spark-project.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608)
> 	at org.spark-project.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543)
> 	at java.lang.Thread.run(Thread.java:745)
> {noformat}
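
For reference, the parse failure above can be reproduced outside Spark: scala.xml.XML.loadString rejects any string containing a bare '<', such as the SQL predicate date <= '2015-11-15'. The sketch below is not the code from pull request 10159; it is a minimal, hypothetical illustration (the makeSafeDescription helper is invented for this example, and scala-xml is assumed to be on the classpath) of the failure mode and one possible mitigation, namely falling back to rendering the description as plain text when it is not well-formed XML.

{noformat}
import scala.xml.{Node, Text, XML}
import org.xml.sax.SAXParseException

object DescriptionDemo {
  // Hypothetical helper (not Spark's UIUtils.makeDescription): try to parse the
  // description as XML markup and fall back to a plain Text node when it is not
  // well-formed, e.g. a SQL string containing a bare '<'.
  def makeSafeDescription(desc: String): Node =
    try {
      XML.loadString(s"<span>$desc</span>")
    } catch {
      case _: SAXParseException =>
        // A bare '<' (as in "date <= ...") makes the string ill-formed XML,
        // which is what produces the SAXParseException in the stack trace above.
        Text(desc)
    }

  def main(args: Array[String]): Unit = {
    val query =
      "select count(1) from table1 where date >= '2015-11-01' and date <= '2015-11-15'"
    // XML.loadString(s"<span>$query</span>") alone would throw; with the
    // fallback we get a Text node back instead of an exception.
    println(makeSafeDescription(query))
  }
}
{noformat}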



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
