Posted to commits@spark.apache.org by rx...@apache.org on 2016/04/25 09:33:54 UTC

spark git commit: [SPARK-14790] Always run scalastyle on sbt compile and test

Repository: spark
Updated Branches:
  refs/heads/master cbdcd4eda -> 761fc46c7


[SPARK-14790] Always run scalastyle on sbt compile and test

## What changes were proposed in this pull request?

Sbt `compile` and `test` should also run scalastyle. This makes it less likely that you forget to run scalastyle locally and then fail in Jenkins. Scalastyle results are cached for efficiency.
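The core mechanism is to redefine `compile` so that a style-check task becomes one of its dependencies, for both the `Compile` and `Test` configurations. A minimal sketch of that pattern (illustrative names, not the exact Spark build definition in the diff below):

```scala
import sbt._
import sbt.Keys._

lazy val styleCheck = taskKey[Unit]("Run style checks")

lazy val exampleSettings: Seq[Setting[_]] = Seq(
  styleCheck := streams.value.log.info("running style checks..."),
  compile in Compile := {
    styleCheck.value            // the style task is now a dependency of compile
    (compile in Compile).value  // the previous compile definition still runs
  }
)
```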

This patch was originally written by ahirreddy; I just fixed it up to work with scalastyle 0.8.0.

## How was this patch tested?

Tested manually with `build/sbt package`.
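The caching mentioned above relies on sbt's `FileFunction.cached`, which records the state of the input files under a cache directory and only invokes the wrapped function when that set of files has changed. A minimal sketch of the pattern with illustrative names (not the exact code from this commit):

```scala
import sbt._
import sbt.Keys._

lazy val cachedCheck = taskKey[Unit]("Run an expensive check only when sources change")

cachedCheck := {
  val log = streams.value.log
  // Per-task cache directory; FileFunction.cached tracks input timestamps here.
  val cacheDir = target.value / "check-cache"
  val runIfChanged = FileFunction.cached(cacheDir, FilesInfo.lastModified, FilesInfo.exists) {
    (inFiles: Set[File]) =>
      log.info(s"Checking ${inFiles.size} changed or new source files")
      // ... run the real check over inFiles here ...
      Set.empty[File]  // no output files to track
  }
  runIfChanged(((scalaSource in Compile).value ** "*.scala").get.toSet)
}
```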

Author: Eric Liang <ek...@databricks.com>

Closes #12555 from ericl/scalastyle.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/761fc46c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/761fc46c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/761fc46c

Branch: refs/heads/master
Commit: 761fc46c70f15ffc3f35ff87b6a5c9192af1c55f
Parents: cbdcd4e
Author: Eric Liang <ek...@databricks.com>
Authored: Mon Apr 25 00:33:51 2016 -0700
Committer: Reynold Xin <rx...@databricks.com>
Committed: Mon Apr 25 00:33:51 2016 -0700

----------------------------------------------------------------------
 .gitignore               |  1 +
 project/SparkBuild.scala | 84 ++++++++++++++++++++++++++++++++++++++++++-
 2 files changed, 84 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/761fc46c/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 05afbb5..2b041e9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,6 +20,7 @@ out/
 build/apache-maven*
 build/zinc*
 build/scala*
+scalastyle-on-compile.generated.xml
 conf/java-opts
 conf/*.sh
 conf/*.cmd

http://git-wip-us.apache.org/repos/asf/spark/blob/761fc46c/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 9e6d341..ffbca25 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -18,6 +18,7 @@
 import java.io._
 import java.nio.file.Files
 
+import scala.io.Source
 import scala.util.Properties
 import scala.collection.JavaConverters._
 import scala.collection.mutable.Stack
@@ -29,6 +30,8 @@ import sbtunidoc.Plugin.UnidocKeys.unidocGenjavadocVersion
 import com.simplytyped.Antlr4Plugin._
 import com.typesafe.sbt.pom.{PomBuild, SbtPomKeys}
 import com.typesafe.tools.mima.plugin.MimaKeys
+import org.scalastyle.sbt.ScalastylePlugin._
+import org.scalastyle.sbt.Tasks
 
 import spray.revolver.RevolverPlugin._
 
@@ -147,7 +150,86 @@ object SparkBuild extends PomBuild {
       "org.spark-project" %% "genjavadoc-plugin" % unidocGenjavadocVersion.value cross CrossVersion.full),
     scalacOptions <+= target.map(t => "-P:genjavadoc:out=" + (t / "java")))
 
-  lazy val sharedSettings = sparkGenjavadocSettings ++ Seq (
+  lazy val scalaStyleRules = Project("scalaStyleRules", file("scalastyle"))
+    .settings(
+      libraryDependencies += "org.scalastyle" %% "scalastyle" % "0.8.0"
+    )
+
+  lazy val scalaStyleOnCompile = taskKey[Unit]("scalaStyleOnCompile")
+
+  lazy val scalaStyleOnTest = taskKey[Unit]("scalaStyleOnTest")
+
+  // We special case the 'println' lint rule to only be a warning on compile, because adding
+  // printlns for debugging is a common use case and is easy to remember to remove.
+  val scalaStyleOnCompileConfig: String = {
+    val in = "scalastyle-config.xml"
+    val out = "scalastyle-on-compile.generated.xml"
+    val replacements = Map(
+      """customId="println" level="error"""" -> """customId="println" level="warn""""
+    )
+    var contents = Source.fromFile(in).getLines.mkString("\n")
+    for ((k, v) <- replacements) {
+      require(contents.contains(k), s"Could not rewrite '$k' in original scalastyle config.")
+      contents = contents.replace(k, v)
+    }
+    new PrintWriter(out) {
+      write(contents)
+      close()
+    }
+    out
+  }
+
+  // Return a cached scalastyle task for a given configuration (usually Compile or Test)
+  private def cachedScalaStyle(config: Configuration) = Def.task {
+    val logger = streams.value.log
+    // We need a different cache dir per Configuration, otherwise they collide
+    val cacheDir = target.value / s"scalastyle-cache-${config.name}"
+    val cachedFun = FileFunction.cached(cacheDir, FilesInfo.lastModified, FilesInfo.exists) {
+      (inFiles: Set[File]) => {
+        val args: Seq[String] = Seq.empty
+        val scalaSourceV = Seq(file(scalaSource.in(config).value.getAbsolutePath))
+        val configV = (baseDirectory in ThisBuild).value / scalaStyleOnCompileConfig
+        val configUrlV = scalastyleConfigUrl.in(config).value
+        val streamsV = streams.in(config).value
+        val failOnErrorV = true
+        val scalastyleTargetV = scalastyleTarget.in(config).value
+        val configRefreshHoursV = scalastyleConfigRefreshHours.in(config).value
+        val targetV = target.in(config).value
+        val configCacheFileV = scalastyleConfigUrlCacheFile.in(config).value
+
+        logger.info(s"Running scalastyle on ${name.value} in ${config.name}")
+        Tasks.doScalastyle(args, configV, configUrlV, failOnErrorV, scalaSourceV, scalastyleTargetV,
+          streamsV, configRefreshHoursV, targetV, configCacheFileV)
+
+        Set.empty
+      }
+    }
+
+    cachedFun(findFiles(scalaSource.in(config).value))
+  }
+
+  private def findFiles(file: File): Set[File] = file.isDirectory match {
+    case true => file.listFiles().toSet.flatMap(findFiles) + file
+    case false => Set(file)
+  }
+
+  def enableScalaStyle: Seq[sbt.Def.Setting[_]] = Seq(
+    scalaStyleOnCompile := cachedScalaStyle(Compile).value,
+    scalaStyleOnTest := cachedScalaStyle(Test).value,
+    logLevel in scalaStyleOnCompile := Level.Warn,
+    logLevel in scalaStyleOnTest := Level.Warn,
+    (compile in Compile) := {
+      scalaStyleOnCompile.value
+      (compile in Compile).value
+    },
+    (compile in Test) := {
+      scalaStyleOnTest.value
+      (compile in Test).value
+    }
+  )
+
+  lazy val sharedSettings = sparkGenjavadocSettings ++
+      (if (sys.env.contains("NOLINT_ON_COMPILE")) Nil else enableScalaStyle) ++ Seq(
     exportJars in Compile := true,
     exportJars in Test := false,
     javaHome := sys.env.get("JAVA_HOME")

