Posted to reviews@spark.apache.org by "LuciferYang (via GitHub)" <gi...@apache.org> on 2024/03/19 03:51:18 UTC

Re: [PR] [SPARK-47455][BUILD] Fix Resource Handling of `scalaStyleOnCompileConfig` in `SparkBuild.scala` [spark]

LuciferYang commented on PR #45582:
URL: https://github.com/apache/spark/pull/45582#issuecomment-2005703619

   ```
   java.lang.IllegalArgumentException: requirement failed: Could not rewrite 'customId="println" level="error"' in original scalastyle config.
   	at scala.Predef$.require(Predef.scala:281)
   	at SparkBuild$.$anonfun$scalaStyleOnCompileConfig$2(SparkBuild.scala:167)
   	at SparkBuild$.$anonfun$scalaStyleOnCompileConfig$2$adapted(SparkBuild.scala:166)
   	at scala.collection.TraversableLike$WithFilter.$anonfun$foreach$1(TraversableLike.scala:985)
   	at scala.collection.mutable.HashMap.$anonfun$foreach$1(HashMap.scala:149)
   	at scala.collection.mutable.HashTable.foreachEntry(HashTable.scala:237)
   	at scala.collection.mutable.HashTable.foreachEntry$(HashTable.scala:230)
   	at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:44)
   	at scala.collection.mutable.HashMap.foreach(HashMap.scala:149)
   	at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:984)
   	at SparkBuild$.<init>(SparkBuild.scala:166)
   	at SparkBuild$.<clinit>(SparkBuild.scala)
   	at java.base/java.lang.Class.forName0(Native Method)
   	at java.base/java.lang.Class.forName(Class.java:467)
   	at sbt.internal.inc.ModuleUtilities$.getObject(ModuleUtilities.scala:24)
   	at sbt.internal.inc.ModuleUtilities$.getCheckedObject(ModuleUtilities.scala:32)
   	at sbt.internal.inc.ModuleUtilities$.$anonfun$getCheckedObjects$1(ModuleUtilities.scala:37)
   	at scala.collection.immutable.Stream.map(Stream.scala:418)
   	at sbt.internal.inc.ModuleUtilities$.getCheckedObjects(ModuleUtilities.scala:37)
   	at sbt.internal.PluginDiscovery$.loadModules(PluginDiscovery.scala:164)
   	at sbt.internal.PluginDiscovery$.binarySourceModules(PluginDiscovery.scala:154)
   	at sbt.internal.PluginDiscovery$.discover$1(PluginDiscovery.scala:44)
   	at sbt.internal.PluginDiscovery$.discoverAll(PluginDiscovery.scala:63)
   	at sbt.internal.Load$.loadPlugins(Load.scala:1330)
   	at sbt.internal.Load$.loadPluginDefinition(Load.scala:1275)
   	at sbt.internal.Load$.buildPlugins(Load.scala:1254)
   	at sbt.internal.Load$.plugins(Load.scala:1233)
   	at sbt.internal.Load$.$anonfun$loadUnit$2(Load.scala:701)
   	at sbt.internal.Load$.timed(Load.scala:1407)
   	at sbt.internal.Load$.$anonfun$loadUnit$1(Load.scala:701)
   	at sbt.internal.Load$.timed(Load.scala:1407)
   	at sbt.internal.Load$.loadUnit(Load.scala:695)
   	at sbt.internal.Load$.$anonfun$builtinLoader$4(Load.scala:493)
   	at sbt.internal.BuildLoader$.$anonfun$componentLoader$5(BuildLoader.scala:181)
   	at sbt.internal.BuildLoader.apply(BuildLoader.scala:246)
   	at sbt.internal.Load$.loadURI$1(Load.scala:555)
   	at sbt.internal.Load$.loadAll(Load.scala:571)
   	at sbt.internal.Load$.loadURI(Load.scala:501)
   	at sbt.internal.Load$.load(Load.scala:480)
   	at sbt.internal.Load$.$anonfun$apply$1(Load.scala:242)
   	at sbt.internal.Load$.timed(Load.scala:1407)
   	at sbt.internal.Load$.apply(Load.scala:242)
   	at sbt.internal.Load$.defaultLoad(Load.scala:57)
   	at sbt.BuiltinCommands$.liftedTree1$1(Main.scala:964)
   	at sbt.BuiltinCommands$.doLoadProject(Main.scala:964)
   	at sbt.BuiltinCommands$.$anonfun$loadProjectImpl$2(Main.scala:917)
   	at sbt.Command$.$anonfun$applyEffect$4(Command.scala:151)
   	at sbt.Command$.$anonfun$applyEffect$2(Command.scala:146)
   	at sbt.Command$.process(Command.scala:190)
   	at sbt.MainLoop$.$anonfun$processCommand$5(MainLoop.scala:246)
   	at scala.Option.getOrElse(Option.scala:189)
   	at sbt.MainLoop$.process$1(MainLoop.scala:246)
   	at sbt.MainLoop$.processCommand(MainLoop.scala:279)
   	at sbt.MainLoop$.$anonfun$next$5(MainLoop.scala:164)
   	at sbt.State$StateOpsImpl$.runCmd$1(State.scala:290)
   	at sbt.State$StateOpsImpl$.process$extension(State.scala:326)
   	at sbt.MainLoop$.$anonfun$next$4(MainLoop.scala:164)
   	at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:24)
   	at sbt.MainLoop$.next(MainLoop.scala:164)
   	at sbt.MainLoop$.run(MainLoop.scala:145)
   	at sbt.MainLoop$.$anonfun$runWithNewLog$1(MainLoop.scala:120)
   	at sbt.io.Using.apply(Using.scala:28)
   	at sbt.MainLoop$.runWithNewLog(MainLoop.scala:113)
   	at sbt.MainLoop$.runAndClearLast(MainLoop.scala:67)
   	at sbt.MainLoop$.runLoggedLoop(MainLoop.scala:52)
   	at sbt.MainLoop$.runLogged(MainLoop.scala:43)
   	at sbt.StandardMain$.runManaged(Main.scala:225)
   	at sbt.xMain$.$anonfun$run$11(Main.scala:135)
   	at sbt.internal.util.Terminal$.withStreams(Terminal.scala:421)
   	at sbt.xMain$.withStreams$1(Main.scala:88)
   	at sbt.xMain$.run(Main.scala:123)
   	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
   	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   	at java.base/java.lang.reflect.Method.invoke(Method.java:568)
   	at sbt.internal.XMainConfiguration.run(XMainConfiguration.java:59)
   	at sbt.xMain.run(Main.scala:47)
   	at xsbt.boot.Launch$.$anonfun$run$1(Launch.scala:149)
   	at xsbt.boot.Launch$.withContextLoader(Launch.scala:176)
   	at xsbt.boot.Launch$.run(Launch.scala:149)
   	at xsbt.boot.Launch$.$anonfun$apply$1(Launch.scala:44)
   	at xsbt.boot.Launch$.launch(Launch.scala:159)
   	at xsbt.boot.Launch$.apply(Launch.scala:44)
   	at xsbt.boot.Launch$.apply(Launch.scala:21)
   	at xsbt.boot.Boot$.runImpl(Boot.scala:78)
   	at xsbt.boot.Boot$.run(Boot.scala:73)
   	at xsbt.boot.Boot$.main(Boot.scala:21)
   	at xsbt.boot.Boot.main(Boot.scala)
   [error] java.lang.IllegalArgumentException: requirement failed: Could not rewrite 'customId="println" level="error"' in original scalastyle config.
   [error] Use 'last' for the full log.
   [warn] Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? (default: r)
   ```
   
   The compilation failed; I'll investigate it later.
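   For reference, the failing `require` comes from the `scalaStyleOnCompileConfig` helper in `SparkBuild.scala`, which copies `scalastyle-config.xml` into a generated on-compile config while downgrading a few rule severities. A rough, hypothetical sketch of that pattern (simplified; not the exact Spark source):

   ```scala
   // Hypothetical, simplified sketch of the rewrite pattern behind the failing
   // require above -- not the exact SparkBuild.scala code.
   import java.io.PrintWriter
   import scala.io.Source

   object ScalastyleRewriteSketch {
     // Severity overrides applied when generating the on-compile config.
     private val replacements: Map[String, String] = Map(
       """customId="println" level="error"""" -> """customId="println" level="warn""""
     )

     // Read the whole file eagerly and close the Source afterwards. If the
     // Source were closed before a lazy getLines iterator was consumed, the
     // contents would come back empty and the require below would throw the
     // same "Could not rewrite ..." message seen in the log.
     private def readFully(path: String): String = {
       val src = Source.fromFile(path)
       try src.mkString finally src.close()
     }

     def rewrite(in: String, out: String): Unit = {
       var contents = readFully(in)
       for ((original, replacement) <- replacements) {
         require(contents.contains(original),
           s"Could not rewrite '$original' in original scalastyle config.")
         contents = contents.replace(original, replacement)
       }
       val writer = new PrintWriter(out)
       try writer.write(contents) finally writer.close()
     }

     def main(args: Array[String]): Unit =
       rewrite("scalastyle-config.xml", "scalastyle-on-compile.generated.xml")
   }
   ```

   Under that reading, the `require` can fail even with an unmodified config if the file contents end up empty, for example when a lazily consumed `Source` is closed before its lines are actually read, which may be related to the resource-handling change this PR makes.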


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

