Posted to issues@spark.apache.org by "Prabir Ghosh (JIRA)" <ji...@apache.org> on 2017/10/02 18:50:03 UTC

[jira] [Created] (SPARK-22186) Py4JJavaError when initializing SparkContext on Windows

Prabir Ghosh created SPARK-22186:
------------------------------------

             Summary: Py4JJavaError when initializing SparkContext on Windows
                 Key: SPARK-22186
                 URL: https://issues.apache.org/jira/browse/SPARK-22186
             Project: Spark
          Issue Type: Bug
          Components: Build
    Affects Versions: 2.1.1
         Environment: Windows 10, Python 3.6
            Reporter: Prabir Ghosh


Py4JJavaError                             Traceback (most recent call last)
<ipython-input-2-5e2ed3915947> in <module>()
----> 1 student = learner.addmission('problem.json')

C:\Users\prabi\Modeling\app/..\learners.py in __init__(self, problempath)
     67                 self.__identifier__ = identifier
     68 
---> 69                 spark_st = spark_setup(data_config["spark_path"],data_config["py4j_folder"])
     70                 self.__sc__ = spark_st.sc()
     71                 self.__GridSearchCV__ = spark_st.GridSearchCV()

C:\Users\prabi\Modeling\app/..\learners.py in __init__(self, spark_path, py4j_path)
     17         __GridSearchCV__ = None
     18         def __init__(self, spark_path, py4j_path):
---> 19                 __spark__ = spark.Setup(spark_path, py4j_path)
     20                 self.__sc__ = __spark__.SparkContext()
     21                 self.__GridSearchCV__ = __spark__.GridSearchCV()

C:\Users\prabi\Modeling\app/..\School\spark_setup.py in __init__(self, spark_path, py4j_path, app_name)
     33                 from spark_sklearn import GridSearchCV
     34 
---> 35                 self.__sc__ = SparkContext(conf=conf)
     36                 self.__grid_serach__ = GridSearchCV
     37 

/ayata/spark/spark-2.1.1-bin-hadoop2.7/python\pyspark\context.py in __init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)
    116         try:
    117             self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
--> 118                           conf, jsc, profiler_cls)
    119         except:
    120             # If an error occurs, clean up in order to allow future SparkContext creation:

/ayata/spark/spark-2.1.1-bin-hadoop2.7/python\pyspark\context.py in _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, jsc, profiler_cls)
    180 
    181         # Create the Java SparkContext through Py4J
--> 182         self._jsc = jsc or self._initialize_context(self._conf._jconf)
    183         # Reset the SparkConf to the one actually used by the SparkContext in JVM.
    184         self._conf = SparkConf(_jconf=self._jsc.sc().conf())

/ayata/spark/spark-2.1.1-bin-hadoop2.7/python\pyspark\context.py in _initialize_context(self, jconf)
    247         Initialize SparkContext in function to allow subclass specific initialization
    248         """
--> 249         return self._jvm.JavaSparkContext(jconf)
    250 
    251     @classmethod

\ayata\spark\spark-2.1.1-bin-hadoop2.7\python\lib\py4j-0.10.4-src.zip\py4j\java_gateway.py in __call__(self, *args)
   1399         answer = self._gateway_client.send_command(command)
   1400         return_value = get_return_value(
-> 1401             answer, self._gateway_client, None, self._fqn)
   1402 
   1403         for temp_arg in temp_args:

\ayata\spark\spark-2.1.1-bin-hadoop2.7\python\lib\py4j-0.10.4-src.zip\py4j\protocol.py in get_return_value(answer, gateway_client, target_id, name)
    317                 raise Py4JJavaError(
    318                     "An error occurred while calling {0}{1}{2}.\n".
--> 319                     format(target_id, ".", name), value)
    320             else:
    321                 raise Py4JError(

Py4JJavaError: An error occurred while calling None.org.apache.spark.api.java.JavaSparkContext.
: java.lang.ExceptionInInitializerError
	at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:80)
	at org.apache.hadoop.security.SecurityUtil.getAuthenticationMethod(SecurityUtil.java:611)
	at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:273)
	at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:261)
	at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:791)
	at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:761)
	at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:634)
	at org.apache.spark.util.Utils$$anonfun$getCurrentUserName$1.apply(Utils.scala:2391)
	at org.apache.spark.util.Utils$$anonfun$getCurrentUserName$1.apply(Utils.scala:2391)
	at scala.Option.getOrElse(Option.scala:121)
	at org.apache.spark.util.Utils$.getCurrentUserName(Utils.scala:2391)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:295)
	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:58)
	at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
	at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown Source)
	at java.base/java.lang.reflect.Constructor.newInstance(Unknown Source)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:247)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:236)
	at py4j.commands.ConstructorCommand.invokeConstructor(ConstructorCommand.java:80)
	at py4j.commands.ConstructorCommand.execute(ConstructorCommand.java:69)
	at py4j.GatewayConnection.run(GatewayConnection.java:214)
	at java.base/java.lang.Thread.run(Unknown Source)
Caused by: java.lang.StringIndexOutOfBoundsException: begin 0, end 3, length 1
	at java.base/java.lang.String.checkBoundsBeginEnd(Unknown Source)
	at java.base/java.lang.String.substring(Unknown Source)
	at org.apache.hadoop.util.Shell.<clinit>(Shell.java:52)
	... 24 more
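
A note on the likely cause (an inference from the trace, not something stated in the report): the "java.base/jdk.internal.reflect" frames show the JVM is Java 9 or newer, while Spark 2.1.1 bundles Hadoop 2.7, whose org.apache.hadoop.util.Shell parses the java.version system property in a static initializer. On Java 9 that property is just "9" rather than a string like "1.8.0_144", so a substring(0, 3) on it fails with exactly "begin 0, end 3, length 1", matching the Caused-by above and the Shell.<clinit> frame. A minimal Java sketch of that failure mode (class name and values here are illustrative, not taken from the report):

    // Minimal sketch of the suspected failure. Hadoop 2.7.x initializes
    // a static field in org.apache.hadoop.util.Shell roughly like:
    //
    //   public static final boolean IS_JAVA7_OR_ABOVE =
    //       System.getProperty("java.version").substring(0, 3)
    //           .compareTo("1.7") >= 0;
    //
    // On JDK 9 the java.version property is just "9", so substring(0, 3)
    // throws the exact exception seen in the Caused-by above.
    public class VersionParseRepro {
        public static void main(String[] args) {
            String javaVersion = "9";  // value of java.version on JDK 9
            // Same parsing Hadoop 2.7 performs at class-init time; throws
            // StringIndexOutOfBoundsException: begin 0, end 3, length 1
            boolean isJava7OrAbove =
                javaVersion.substring(0, 3).compareTo("1.7") >= 0;
            System.out.println(isJava7OrAbove);
        }
    }

If this reading is correct, the usual workaround is to run Spark 2.1.x on a Java 8 JDK (a java.version of the form 1.8.0_x), for example by pointing JAVA_HOME at a JDK 8 install before launching the Python process.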
