Posted to commits@spark.apache.org by gu...@apache.org on 2019/03/12 23:04:03 UTC

[spark] branch master updated: [SPARK-27130][BUILD] Automatically select profile when executing sbt-checkstyle

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new dccf661  [SPARK-27130][BUILD] Automatically select profile when executing sbt-checkstyle
dccf661 is described below

commit dccf6615c34c9347e937838742dec88456843f13
Author: Yuming Wang <yu...@ebay.com>
AuthorDate: Wed Mar 13 08:03:46 2019 +0900

    [SPARK-27130][BUILD] Automatically select profile when executing sbt-checkstyle
    
    ## What changes were proposed in this pull request?
    
    This PR makes the build profiles be selected automatically when executing `sbt-checkstyle`: `dev/run-tests` now passes the same profiles it uses for the build. The reason for this is that `hadoop-2.7` and `hadoop-3.1` may have different `hive-thriftserver` modules in the future.
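    
    A minimal usage sketch based on the diff below (the profile list shown is just the script's default, given for illustration): `dev/sbt-checkstyle` now takes the profile flags as an optional first argument and falls back to its built-in defaults when none are given.
    ```
    # Pass the build profiles as a single quoted argument...
    ./dev/sbt-checkstyle "-Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver"
    # ...or pass nothing to use the defaults hard-coded in the script.
    ./dev/sbt-checkstyle
    ```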
    
    ## How was this patch tested?
    
    Manual test:
    ```
    # Update the AbstractService.java file, then run the full checks with the Hadoop 2.7 profile.
    export HADOOP_PROFILE=hadoop2.7
    ./dev/run-tests
    ```
    The result:
    ![image](https://user-images.githubusercontent.com/5399861/54197992-5337e780-4500-11e9-930c-722982cdcd45.png)
    
    Closes #24065 from wangyum/SPARK-27130.
    
    Authored-by: Yuming Wang <yu...@ebay.com>
    Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
 dev/run-tests.py   | 10 +++++++---
 dev/sbt-checkstyle | 11 +++--------
 2 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/dev/run-tests.py b/dev/run-tests.py
index 122c5c6..aa106af 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -180,9 +180,13 @@ def run_scala_style_checks():
     run_cmd([os.path.join(SPARK_HOME, "dev", "lint-scala")])
 
 
-def run_java_style_checks():
+def run_java_style_checks(build_profiles):
     set_title_and_block("Running Java style checks", "BLOCK_JAVA_STYLE")
-    run_cmd([os.path.join(SPARK_HOME, "dev", "sbt-checkstyle")])
+    # The same profiles used for building are used to run Checkstyle by SBT as well, because
+    # the output of the previous build appears to be reused by Checkstyle and affects its results. See SPARK-27130.
+    profiles = " ".join(build_profiles)
+    print("[info] Checking Java style using SBT with these profiles: ", profiles)
+    run_cmd([os.path.join(SPARK_HOME, "dev", "sbt-checkstyle"), profiles])
 
 
 def run_python_style_checks():
@@ -333,7 +337,7 @@ def build_spark_assembly_sbt(hadoop_version, checkstyle=False):
     exec_sbt(profiles_and_goals)
 
     if checkstyle:
-        run_java_style_checks()
+        run_java_style_checks(build_profiles)
 
     build_spark_unidoc_sbt(hadoop_version)
 
diff --git a/dev/sbt-checkstyle b/dev/sbt-checkstyle
index 18f3bd8..1ecad59 100755
--- a/dev/sbt-checkstyle
+++ b/dev/sbt-checkstyle
@@ -17,17 +17,12 @@
 # limitations under the License.
 #
 
+profiles=${1:-"-Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver"}
+
 # NOTE: echo "q" is needed because SBT prompts the user for input on encountering a build file
 # with failure (either resolution or compilation); the "q" makes SBT quit.
 ERRORS=$(echo -e "q\n" \
-    | build/sbt \
-        -Pkinesis-asl \
-        -Pmesos \
-        -Pkubernetes \
-        -Pyarn \
-        -Phive \
-        -Phive-thriftserver \
-        checkstyle test:checkstyle \
+    | build/sbt ${profiles} checkstyle test:checkstyle \
     | awk '{if($1~/error/)print}' \
 )
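
For reference, when invoked with no argument the updated script ends up running effectively the same SBT command as before (a sketch; the awk error-filtering pipeline is omitted here):
```
echo -e "q\n" | build/sbt \
    -Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver \
    checkstyle test:checkstyle
```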
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org