You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2024/01/30 00:48:09 UTC

(spark) branch master updated: [SPARK-46910][PYTHON] Eliminate JDK Requirement in PySpark Installation

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 83fad32f1c68 [SPARK-46910][PYTHON] Eliminate JDK Requirement in PySpark Installation
83fad32f1c68 is described below

commit 83fad32f1c68c991cdeaead5e14052cdac89f3b7
Author: Amanda Liu <am...@databricks.com>
AuthorDate: Tue Jan 30 09:47:56 2024 +0900

    [SPARK-46910][PYTHON] Eliminate JDK Requirement in PySpark Installation
    
    ### What changes were proposed in this pull request?
    Modifies the PySpark installation script to ask users whether to install the necessary JDK if it is not already installed.
    
    ### Why are the changes needed?
    Simplifying the PySpark installation process is a critical part of improving the new user onboarding experience. Many new PySpark users get blocked in the installation process, due to confusing errors from not having Java installed. This change simplifies the PySpark user onboarding process.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, modifies the PySpark installation script.
    
    ### How was this patch tested?
    Installing PySpark in virtual environments
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #44940 from asl3/jdk-install.
    
    Lead-authored-by: Amanda Liu <am...@databricks.com>
    Co-authored-by: Hyukjin Kwon <gu...@gmail.com>
    Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
 bin/pyspark | 27 +++++++++++++++++++++++++++
 1 file changed, 27 insertions(+)

diff --git a/bin/pyspark b/bin/pyspark
index 1ae28b1f507c..2f08f7836915 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -48,6 +48,33 @@ export PYSPARK_PYTHON
 export PYSPARK_DRIVER_PYTHON
 export PYSPARK_DRIVER_PYTHON_OPTS
 
+# Attempt to find JAVA_HOME.
+# If JAVA_HOME is not set, install JDK 17 and set JAVA_HOME using a temp dir, and add the
+# temp dir to the PYTHONPATH.
+if [ -n "${JAVA_HOME}" ]; then
+  RUNNER="${JAVA_HOME}/bin/java"
+else
+  if [ "$(command -v java)" ]; then
+    RUNNER="java"
+  else
+    echo -n "JAVA_HOME is not set. Would you like to install JDK 17 and set JAVA_HOME? (Y/N) " >&2
+
+    read -r input
+
+    if [[ "${input,,}" == "y" ]]; then
+        TEMP_DIR=$(mktemp -d)
+        $PYSPARK_DRIVER_PYTHON -m pip install --target="$TEMP_DIR" install-jdk
+        export JAVA_HOME=$(PYTHONPATH="$TEMP_DIR" $PYSPARK_DRIVER_PYTHON -c 'import jdk; print(jdk.install("17"))')
+        RUNNER="${JAVA_HOME}/bin/java"
+        echo "JDK was installed to the path \"$JAVA_HOME\""
+        echo "You can avoid needing to re-install JDK by setting your JAVA_HOME environment variable to \"$JAVA_HOME\""
+    else
+        echo "JDK installation skipped. You can manually install JDK (17 or later) and set JAVA_HOME in your environment."
+        exit 1
+    fi
+  fi
+fi
+
 # Add the PySpark classes to the Python path:
 export PYTHONPATH="${SPARK_HOME}/python/:$PYTHONPATH"
 export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org