You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@spark.apache.org by zitang qin <zi...@gmail.com> on 2017/02/05 22:45:08 UTC
Invalid checkpoint file on spark 1.6.2
Hello everyone,
I was running the code below on Spark 1.6.2. Could anyone help with the error
message? Much appreciated.
df_batch_rdd=hc.read.option("basePath",
T_CF_Prefix).parquet(*tcfFolderString).rdd
df_batch_rdd.cache()
df_batch_rdd.checkpoint()
print str(df_batch_rdd.count()) +" "+
str(datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S'))
df_batch=df_batch_rdd.toDF()
df_batch.registerTempTable("df_batch")
Py4JJavaError: An error occurred while calling
z:org.apache.spark.api.python.PythonRDD.collectAndServe.
: org.apache.spark.SparkException: Invalid checkpoint file:
hdfs://x.x.x.x:8020/data/chkpoint/4adac5e6-2bac-44cf-a2e7-f9c155ff47cc/rdd-1727/part-100000
at
org.apache.spark.rdd.ReliableCheckpointRDD$$anonfun$getPartitions$1.apply(ReliableCheckpointRDD.scala:75)
at
org.apache.spark.rdd.ReliableCheckpointRDD$$anonfun$getPartitions$1.apply(ReliableCheckpointRDD.scala:73)
at
scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at
scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
at
org.apache.spark.rdd.ReliableCheckpointRDD.getPartitions(ReliableCheckpointRDD.scala:73)
at
org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:242)
at
org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:240)
at scala.Option.getOrElse(Option.scala:120)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:240)
at
org.apache.spark.rdd.ReliableCheckpointRDD$.writeRDDToCheckpointDirectory(ReliableCheckpointRDD.scala:144)
at
org.apache.spark.rdd.ReliableRDDCheckpointData.doCheckpoint(ReliableRDDCheckpointData.scala:58)
at
org.apache.spark.rdd.RDDCheckpointData.checkpoint(RDDCheckpointData.scala:74)
at
org.apache.spark.rdd.RDD$$anonfun$doCheckpoint$1.apply$mcV$sp(RDD.scala:1705)
at
org.apache.spark.rdd.RDD$$anonfun$doCheckpoint$1.apply(RDD.scala:1695)
at
org.apache.spark.rdd.RDD$$anonfun$doCheckpoint$1.apply(RDD.scala:1695)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.rdd.RDD.doCheckpoint(RDD.scala:1694)
at
org.apache.spark.rdd.RDD$$anonfun$doCheckpoint$1$$anonfun$apply$mcV$sp$2.apply(RDD.scala:1707)
at
org.apache.spark.rdd.RDD$$anonfun$doCheckpoint$1$$anonfun$apply$mcV$sp$2.apply(RDD.scala:1707)
at scala.collection.immutable.List.foreach(List.scala:318)
at
org.apache.spark.rdd.RDD$$anonfun$doCheckpoint$1.apply$mcV$sp(RDD.scala:1707)
at
org.apache.spark.rdd.RDD$$anonfun$doCheckpoint$1.apply(RDD.scala:1695)
at
org.apache.spark.rdd.RDD$$anonfun$doCheckpoint$1.apply(RDD.scala:1695)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.rdd.RDD.doCheckpoint(RDD.scala:1694)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1858)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1869)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1882)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1953)
at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:934)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:323)
at org.apache.spark.rdd.RDD.collect(RDD.scala:933)
at
org.apache.spark.api.python.PythonRDD$.collectAndServe(PythonRDD.scala:405)
at
org.apache.spark.api.python.PythonRDD.collectAndServe(PythonRDD.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
at
py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:381)
at py4j.Gateway.invoke(Gateway.java:259)
at
py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:209)
at java.lang.Thread.run(Thread.java:745)