Posted to user@giraph.apache.org by HariSantosh Rongali <ha...@gmail.com> on 2014/03/13 08:13:29 UTC

Fwd: Giraph job failing to load vertices

Hello Experts,

I am trying to run a Giraph job with custom vertices, custom edges and
custom messages, but Giraph is not able to instantiate the vertex and the
job fails while loading the input. Here is my input:

[1000,[[],[0],[]],[[2000,[100,10]]]]
[2000,[[],[],[]],[[3000,[75,7]]]]
[3000,[[],[],[]],[[4000,[20.5,5]]]]

Vertex representation:

Vertex id - LongWritable
Vertex value - Custom Writable (ArrayList<LinkedList<String>>)
Edge id - LongWritable
Edge Value - Custom Writable (JSONArray)
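
(I have not included GenealogyCustomEdgeValue below; as noted above, it is
essentially a custom Writable wrapping a JSONArray. A minimal sketch of that
shape is the following; the field name and the IOException wrapping are just
illustrative, not necessarily my exact class:)

package org.apache.giraph.examples;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;
import org.json.JSONArray;
import org.json.JSONException;

public class GenealogyCustomEdgeValue implements Writable {

    // Holds the per-edge array from the input, e.g. [100,10].
    private JSONArray edgeData;

    // Hadoop/Giraph needs a no-arg constructor to instantiate Writables.
    public GenealogyCustomEdgeValue() {
        this.edgeData = new JSONArray();
    }

    public GenealogyCustomEdgeValue(JSONArray edgeData) {
        this.edgeData = edgeData;
    }

    public JSONArray getEdgeData() {
        return edgeData;
    }

    public void write(DataOutput output) throws IOException {
        // Serialize the JSON array via its string form.
        output.writeUTF(edgeData.toString());
    }

    public void readFields(DataInput input) throws IOException {
        try {
            edgeData = new JSONArray(input.readUTF());
        } catch (JSONException e) {
            throw new IOException("Could not parse edge value", e);
        }
    }
}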

Here is my custom Input format:

package org.apache.giraph.examples;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import org.apache.giraph.edge.Edge;
import org.apache.giraph.edge.EdgeFactory;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.io.formats.TextVertexInputFormat;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.json.JSONArray;
import org.json.JSONException;

import com.google.common.collect.Lists;

public class CustomGenerateVertexInputFormat extends
        TextVertexInputFormat<LongWritable, GenealogyCustomVertexValue,
        GenealogyCustomEdgeValue> {

    @Override
    public TextVertexReader createVertexReader(InputSplit split,
            TaskAttemptContext context) {
        return new CustomCustomVertexReader();
    }

    class CustomCustomVertexReader extends
            TextVertexReaderFromEachLineProcessedHandlingExceptions<JSONArray,
            JSONException> {

        @Override
        protected JSONArray preprocessLine(Text line) throws JSONException {
            // Each input line is a JSON array:
            // [id, [[lots],[amounts],[days]], [[targetId,[edgeData]], ...]]
            return new JSONArray(line.toString());
        }

        @Override
        protected LongWritable getId(JSONArray jsonVertex)
                throws JSONException, IOException {
            return new LongWritable(jsonVertex.getLong(0));
        }

        @Override
        protected GenealogyCustomVertexValue getValue(JSONArray jsonVertex)
                throws JSONException, IOException {
            GenealogyCustomVertexValue verval = new GenealogyCustomVertexValue();
            JSONArray vervalArray = jsonVertex.getJSONArray(1);

            JSONArray lotArray = vervalArray.getJSONArray(0);
            JSONArray amountArray = vervalArray.getJSONArray(1);
            JSONArray dayArray = vervalArray.getJSONArray(2);

            // Copy the three JSON arrays into the three linked lists of the value.
            ArrayList<LinkedList<String>> list = new ArrayList<LinkedList<String>>();

            LinkedList<String> lotList = new LinkedList<String>();
            for (int i = 0; i < lotArray.length(); i++) {
                lotList.add(lotArray.getString(i));
            }
            list.add(lotList);

            LinkedList<String> amountList = new LinkedList<String>();
            for (int i = 0; i < amountArray.length(); i++) {
                amountList.add(amountArray.getString(i));
            }
            list.add(amountList);

            LinkedList<String> dayList = new LinkedList<String>();
            for (int i = 0; i < dayArray.length(); i++) {
                dayList.add(dayArray.getString(i));
            }
            list.add(dayList);

            verval.setValues(list);
            return verval;
        }

        @Override
        protected Iterable<Edge<LongWritable, GenealogyCustomEdgeValue>> getEdges(
                JSONArray jsonVertex) throws JSONException, IOException {
            JSONArray jsonEdgeArray = jsonVertex.getJSONArray(2);

            List<Edge<LongWritable, GenealogyCustomEdgeValue>> edges = Lists
                    .newArrayListWithCapacity(jsonEdgeArray.length());

            for (int i = 0; i < jsonEdgeArray.length(); ++i) {
                JSONArray jsonEdge = jsonEdgeArray.getJSONArray(i);
                edges.add(EdgeFactory.create(
                        new LongWritable(jsonEdge.getLong(0)),
                        new GenealogyCustomEdgeValue(jsonEdge.getJSONArray(1))));
            }

            return edges;
        }

        @Override
        protected Vertex<LongWritable, GenealogyCustomVertexValue,
                GenealogyCustomEdgeValue> handleException(
                Text line, JSONArray jsonVertex, JSONException e) {
            throw new IllegalArgumentException(
                    "Couldn't get vertex from line " + line, e);
        }
    }
}
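
(In case it matters: a GiraphRunner-style invocation for this setup would look
roughly like the command below. The computation class and input path are the
ones that appear in the log further down; the jar name, output path and output
format are placeholders, and the exact option spellings vary a bit between
Giraph releases.)

hadoop jar giraph-examples-jar-with-dependencies.jar \
    org.apache.giraph.GiraphRunner \
    org.apache.giraph.examples.JnJGenealogyGeneration \
    -vif org.apache.giraph.examples.CustomGenerateVertexInputFormat \
    -vip /usr/hrongali/input/geninput.txt \
    -of org.apache.giraph.io.formats.IdWithValueTextOutputFormat \
    -op /usr/hrongali/output \
    -w 1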

Custom vertex value class:

package org.apache.giraph.examples;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;

import org.apache.hadoop.io.Writable;

// Vertex value: three parallel string lists (lots, amounts, days).
public class GenealogyCustomVertexValue implements Writable {

    public ArrayList<LinkedList<String>> values;

    public ArrayList<LinkedList<String>> getValues() {
        return values;
    }

    public void setValues(ArrayList<LinkedList<String>> values) {
        this.values = values;
    }

    public GenealogyCustomVertexValue() {
        this.values = new ArrayList<LinkedList<String>>();
    }

    public GenealogyCustomVertexValue(ArrayList<LinkedList<String>> values) {
        this.values = values;
    }

    public void readFields(DataInput input) throws IOException {
        // Read the outer list size, then each inner list.
        int arrayListSize = input.readInt();
        if (arrayListSize != 3) {
            System.out.println("Arraylist size is not 3");
            return;
        }

        for (int j = 0; j < arrayListSize; j++) {
            int inListSize = input.readInt();
            LinkedList<String> list = new LinkedList<String>();

            for (int i = 0; i < inListSize; i++) {
                list.add(input.readUTF());
            }
            values.add(list);
        }
    }

    public void write(DataOutput output) throws IOException {
        // Write the outer ArrayList size, then each inner list with its size.
        int size = this.values.size();
        output.writeInt(size);

        for (LinkedList<String> list : values) {
            int listSize = list.size();
            output.writeInt(listSize);
            for (int i = 0; i < listSize; i++) {
                output.writeUTF(list.get(i));
            }
        }
    }

}
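
(In case it helps with debugging, a quick standalone round-trip check of the
Writable above would look like the sketch below; the class name and sample
data are made up, but it only calls write() and readFields() as defined above:)

package org.apache.giraph.examples;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;

public class VertexValueRoundTripTest {

    public static void main(String[] args) throws Exception {
        // Build a value with the three expected lists (lots, amounts, days).
        ArrayList<LinkedList<String>> lists = new ArrayList<LinkedList<String>>();
        lists.add(new LinkedList<String>(Arrays.asList("lot1")));
        lists.add(new LinkedList<String>(Arrays.asList("100")));
        lists.add(new LinkedList<String>(Arrays.asList("10")));

        GenealogyCustomVertexValue original = new GenealogyCustomVertexValue(lists);

        // Serialize with write().
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Deserialize with readFields() into a fresh instance.
        GenealogyCustomVertexValue copy = new GenealogyCustomVertexValue();
        copy.readFields(new DataInputStream(
                new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println("original = " + original.getValues());
        System.out.println("copy     = " + copy.getValues());
    }
}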



Error stack trace:

2014-03-12 23:34:14,032 INFO org.apache.hadoop.util.NativeCodeLoader:
Loaded the native-hadoop library
2014-03-12 23:34:14,902 INFO org.apache.giraph.graph.GraphTaskManager:
setup: Log level remains at info
2014-03-12 23:34:14,989 INFO org.apache.giraph.graph.GraphTaskManager:
Distributed cache is empty. Assuming fatjar.
2014-03-12 23:34:14,989 INFO org.apache.giraph.graph.GraphTaskManager:
setup: classpath @
/usr/local/tmp/mapred/local/taskTracker/hduser/jobcache/job_201403122301_0003/jars/job.jar
for job Giraph: org.apache.giraph.examples.JnJGenealogyGeneration
2014-03-12 23:34:15,044 INFO org.apache.giraph.zk.ZooKeeperManager:
createCandidateStamp: Made the directory
_bsp/_defaultZkManagerDir/job_201403122301_0003
2014-03-12 23:34:15,047 INFO org.apache.giraph.zk.ZooKeeperManager:
createCandidateStamp: Made the directory
_bsp/_defaultZkManagerDir/job_201403122301_0003/_zkServer
2014-03-12 23:34:15,049 INFO org.apache.giraph.zk.ZooKeeperManager:
createCandidateStamp: Creating my filestamp
_bsp/_defaultZkManagerDir/job_201403122301_0003/_task/centoshost 1
2014-03-12 23:34:15,075 INFO org.apache.giraph.zk.ZooKeeperManager:
getZooKeeperServerList: For task 1, got file 'zkServerList_centoshost 0 '
(polling period is 3000)
2014-03-12 23:34:15,075 INFO org.apache.giraph.zk.ZooKeeperManager:
getZooKeeperServerList: Found [centoshost, 0] 2 hosts in filename
'zkServerList_centoshost 0 '
2014-03-12 23:34:15,077 INFO org.apache.giraph.zk.ZooKeeperManager:
onlineZooKeeperServers: Got [centoshost] 1 hosts from 1 ready servers when
1 required (polling period is 3000) on attempt 0
2014-03-12 23:34:15,078 INFO org.apache.giraph.graph.GraphTaskManager:
setup: Starting up BspServiceWorker...
2014-03-12 23:34:15,110 INFO org.apache.giraph.bsp.BspService: BspService:
Path to create to halt is /_hadoopBsp/job_201403122301_0003/_haltComputation
2014-03-12 23:34:15,110 INFO org.apache.giraph.bsp.BspService: BspService:
Connecting to ZooKeeper with job job_201403122301_0003, 1 on
centoshost:22181
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:zookeeper.version=3.4.5-1392090, built on 09/30/2012 17:52 GMT
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:host.name=centoshost
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:java.version=1.7.0_51
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:java.vendor=Oracle Corporation
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:java.home=/usr/local/jdk1.7.0_51/jre
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:java.class.path=/usr/local/tmp/mapred/local/taskTracker/hduser/jobcache/job_201403122301_0003/jars/classes:/usr/local/tmp/mapred/local/taskTracker/hduser/jobcache/job_201403122301_0003/jars:/usr/local/tmp/mapred/local/taskTracker/hduser/jobcache/job_201403122301_0003/attempt_201403122301_0003_m_000001_0/work:/usr/local/hadoop/bin/../conf:/usr/local/jdk1.7.0_51/lib/tools.jar:/usr/local/hadoop/bin/..:/usr/local/hadoop/bin/../hadoop-core-0.20.203.0.jar:/usr/local/hadoop/bin/../lib/aspectjrt-1.6.5.jar:/usr/local/hadoop/bin/../lib/aspectjtools-1.6.5.jar:/usr/local/hadoop/bin/../lib/commons-beanutils-1.7.0.jar:/usr/local/hadoop/bin/../lib/commons-beanutils-core-1.8.0.jar:/usr/local/hadoop/bin/../lib/commons-cli-1.2.jar:/usr/local/hadoop/bin/../lib/commons-codec-1.4.jar:/usr/local/hadoop/bin/../lib/commons-collections-3.2.1.jar:/usr/local/hadoop/bin/../lib/commons-configuration-1.6.jar:/usr/local/hadoop/bin/../lib/commons-daemon-1.0.1.jar:/usr/local/hadoop/bin/../lib/commons-digester-1.8.jar:/usr/local/hadoop/bin/../lib/commons-el-1.0.jar:/usr/local/hadoop/bin/../lib/commons-httpclient-3.0.1.jar:/usr/local/hadoop/bin/../lib/commons-lang-2.4.jar:/usr/local/hadoop/bin/../lib/commons-logging-1.1.1.jar:/usr/local/hadoop/bin/../lib/commons-logging-api-1.0.4.jar:/usr/local/hadoop/bin/../lib/commons-math-2.1.jar:/usr/local/hadoop/bin/../lib/commons-net-1.4.1.jar:/usr/local/hadoop/bin/../lib/core-3.1.1.jar:/usr/local/hadoop/bin/../lib/hsqldb-1.8.0.10.jar:/usr/local/hadoop/bin/../lib/jackson-core-asl-1.0.1.jar:/usr/local/hadoop/bin/../lib/jackson-mapper-asl-1.0.1.jar:/usr/local/hadoop/bin/../lib/jasper-compiler-5.5.12.jar:/usr/local/hadoop/bin/../lib/jasper-runtime-5.5.12.jar:/usr/local/hadoop/bin/../lib/jets3t-0.6.1.jar:/usr/local/hadoop/bin/../lib/jetty-6.1.26.jar:/usr/local/hadoop/bin/../lib/jetty-util-6.1.26.jar:/usr/local/hadoop/bin/../lib/jsch-0.1.42.jar:/usr/local/hadoop/bin/../lib/junit-4.5.jar:/usr/local/hadoop/bin/../lib/kfs-0.2.2.jar:/usr/local/hadoop/bin/../lib/log4j-1.2.15.jar:/usr/local/hadoop/bin/../lib/mockito-all-1.8.5.jar:/usr/local/hadoop/bin/../lib/oro-2.0.8.jar:/usr/local/hadoop/bin/../lib/servlet-api-2.5-20081211.jar:/usr/local/hadoop/bin/../lib/slf4j-api-1.4.3.jar:/usr/local/hadoop/bin/../lib/slf4j-log4j12-1.4.3.jar:/usr/local/hadoop/bin/../lib/xmlenc-0.52.jar:/usr/local/hadoop/bin/../lib/jsp-2.1/jsp-2.1.jar:/usr/local/hadoop/bin/../lib/jsp-2.1/jsp-api-2.1.jar
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:java.library.path=/usr/local/hadoop/bin/../lib/native/Linux-amd64-64:/usr/local/tmp/mapred/local/taskTracker/hduser/jobcache/job_201403122301_0003/attempt_201403122301_0003_m_000001_0/work
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:java.io.tmpdir=/usr/local/tmp/mapred/local/taskTracker/hduser/jobcache/job_201403122301_0003/attempt_201403122301_0003_m_000001_0/work/tmp
2014-03-12 23:34:15,124 INFO org.apache.zookeeper.ZooKeeper: Client
environment:java.compiler=<NA>
2014-03-12 23:34:15,128 INFO org.apache.zookeeper.ZooKeeper: Client
environment:os.name=Linux
2014-03-12 23:34:15,128 INFO org.apache.zookeeper.ZooKeeper: Client
environment:os.arch=amd64
2014-03-12 23:34:15,128 INFO org.apache.zookeeper.ZooKeeper: Client
environment:os.version=2.6.32-431.el6.x86_64
2014-03-12 23:34:15,128 INFO org.apache.zookeeper.ZooKeeper: Client
environment:user.name=hduser
2014-03-12 23:34:15,128 INFO org.apache.zookeeper.ZooKeeper: Client
environment:user.home=/home/hduser
2014-03-12 23:34:15,128 INFO org.apache.zookeeper.ZooKeeper: Client
environment:user.dir=/usr/local/tmp/mapred/local/taskTracker/hduser/jobcache/job_201403122301_0003/attempt_201403122301_0003_m_000001_0/work
2014-03-12 23:34:15,129 INFO org.apache.zookeeper.ZooKeeper: Initiating
client connection, connectString=centoshost:22181 sessionTimeout=60000
watcher=org.apache.giraph.worker.BspServiceWorker@626d26e
2014-03-12 23:34:15,156 INFO org.apache.zookeeper.ClientCnxn: Opening
socket connection to server centoshost/172.16.226.200:22181. Will not
attempt to authenticate using SASL (unknown error)
2014-03-12 23:34:15,160 INFO org.apache.zookeeper.ClientCnxn: Socket
connection established to centoshost/172.16.226.200:22181, initiating
session
2014-03-12 23:34:15,172 INFO org.apache.zookeeper.ClientCnxn: Session
establishment complete on server centoshost/172.16.226.200:22181, sessionid
= 0x144ba1e19dc0004, negotiated timeout = 600000
2014-03-12 23:34:15,178 INFO org.apache.giraph.bsp.BspService: process:
Asynchronous connection complete.
2014-03-12 23:34:15,319 INFO org.apache.giraph.comm.netty.NettyServer:
NettyServer: Using execution group with 8 threads for requestFrameDecoder.
2014-03-12 23:34:15,359 WARN org.apache.giraph.comm.netty.NettyServer:
start: Likely failed to bind on attempt 0 to port 30001
2014-03-12 23:34:15,360 INFO org.apache.giraph.comm.netty.NettyServer:
start: Started server communication server:
centoshost/172.16.226.200:30011with up to 16 threads on bind attempt 1
with sendBufferSize = 32768
receiveBufferSize = 524288
2014-03-12 23:34:15,363 INFO org.apache.giraph.comm.netty.NettyClient:
NettyClient: Using execution handler with 8 threads after request-encoder.
2014-03-12 23:34:15,394 INFO org.apache.giraph.graph.GraphTaskManager:
setup: Registering health of this worker...
2014-03-12 23:34:15,406 INFO org.apache.giraph.bsp.BspService: getJobState:
Job state already exists (/_hadoopBsp/job_201403122301_0003/_masterJobState)
2014-03-12 23:34:15,409 INFO org.apache.giraph.bsp.BspService:
getApplicationAttempt: Node
/_hadoopBsp/job_201403122301_0003/_applicationAttemptsDir already exists!
2014-03-12 23:34:15,415 INFO org.apache.giraph.bsp.BspService:
getApplicationAttempt: Node
/_hadoopBsp/job_201403122301_0003/_applicationAttemptsDir already exists!
2014-03-12 23:34:15,425 INFO org.apache.giraph.worker.BspServiceWorker:
registerHealth: Created my health node for attempt=0, superstep=-1 with
/_hadoopBsp/job_201403122301_0003/_applicationAttemptsDir/0/_superstepDir/-1/_workerHealthyDir/centoshost_1
and workerInfo= Worker(hostname=centoshost, MRtaskID=1, port=30011)
2014-03-12 23:34:15,490 INFO org.apache.giraph.comm.netty.NettyServer:
start: Using Netty without authentication.
2014-03-12 23:34:15,502 INFO org.apache.giraph.bsp.BspService: process:
partitionAssignmentsReadyChanged (partitions are assigned)
2014-03-12 23:34:15,506 INFO org.apache.giraph.worker.BspServiceWorker:
startSuperstep: Master(hostname=centoshost, MRtaskID=0, port=30010)
2014-03-12 23:34:15,506 INFO org.apache.giraph.worker.BspServiceWorker:
startSuperstep: Ready for computation on superstep -1 since worker
selection and vertex range assignments are done in
/_hadoopBsp/job_201403122301_0003/_applicationAttemptsDir/0/_superstepDir/-1/_addressesAndPartitions
2014-03-12 23:34:15,524 INFO org.apache.giraph.comm.netty.NettyClient:
Using Netty without authentication.
2014-03-12 23:34:15,556 INFO org.apache.giraph.comm.netty.NettyClient:
connectAllAddresses: Successfully added 1 connections, (1 total connected)
0 failed, 0 failures total.
2014-03-12 23:34:15,578 INFO
org.apache.giraph.comm.netty.handler.RequestDecoder: decode: Server window
metrics MBytes/sec received = 0, MBytesReceived = 0.0001, ave received req
MBytes = 0.0001, secs waited = 1.39469248E9
2014-03-12 23:34:15,594 INFO org.apache.giraph.worker.BspServiceWorker:
loadInputSplits: Using 1 thread(s), originally 1 threads(s) for 1 total
splits.
2014-03-12 23:34:15,623 INFO org.apache.giraph.worker.InputSplitsHandler:
reserveInputSplit: Reserved input split path
/_hadoopBsp/job_201403122301_0003/_vertexInputSplitDir/0, overall roughly
0.0% input splits reserved
2014-03-12 23:34:15,624 INFO org.apache.giraph.worker.InputSplitsCallable:
getInputSplit: Reserved
/_hadoopBsp/job_201403122301_0003/_vertexInputSplitDir/0 from ZooKeeper and
got input split
'hdfs://centoshost:54310/usr/hrongali/input/geninput.txt:0+107'
2014-03-12 23:34:15,658 ERROR
org.apache.giraph.utils.LogStacktraceCallable: Execution of callable failed
java.lang.ClassCastException: java.lang.NullPointerException cannot be cast
to org.json.JSONException
    at
org.apache.giraph.examples.CustomGenerateVertexInputFormat$CustomCustomVertexReader.handleException(CustomGenerateVertexInputFormat.java:63)
    at
org.apache.giraph.io.formats.TextVertexInputFormat$TextVertexReaderFromEachLineProcessedHandlingExceptions.getCurrentVertex(TextVertexInputFormat.java:338)
    at
org.apache.giraph.io.internal.WrappedVertexReader.getCurrentVertex(WrappedVertexReader.java:89)
    at
org.apache.giraph.worker.VertexInputSplitsCallable.readInputSplit(VertexInputSplitsCallable.java:148)
    at
org.apache.giraph.worker.InputSplitsCallable.loadInputSplit(InputSplitsCallable.java:267)
    at
org.apache.giraph.worker.InputSplitsCallable.call(InputSplitsCallable.java:211)
    at
org.apache.giraph.worker.InputSplitsCallable.call(InputSplitsCallable.java:60)
    at
org.apache.giraph.utils.LogStacktraceCallable.call(LogStacktraceCallable.java:51)
    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
    at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:744)
2014-03-12 23:34:15,664 ERROR org.apache.giraph.worker.BspServiceWorker:
unregisterHealth: Got failure, unregistering health on
/_hadoopBsp/job_201403122301_0003/_applicationAttemptsDir/0/_superstepDir/-1/_workerHealthyDir/centoshost_1
on superstep -1
2014-03-12 23:34:15,715 INFO org.apache.hadoop.mapred.TaskLogsTruncater:
Initializing logs' truncater with mapRetainSize=-1 and reduceRetainSize=-1
2014-03-12 23:34:15,784 INFO org.apache.hadoop.io.nativeio.NativeIO:
Initialized cache for UID to User mapping with a cache timeout of 14400
seconds.
2014-03-12 23:34:15,784 INFO org.apache.hadoop.io.nativeio.NativeIO: Got
UserName hduser for UID 501 from the native implementation
2014-03-12 23:34:15,794 WARN org.apache.hadoop.mapred.Child: Error running
child
java.lang.IllegalStateException: run: Caught an unrecoverable exception
waitFor: ExecutionException occurred while waiting for
org.apache.giraph.utils.ProgressableUtils$FutureWaitable@315e46fe
    at org.apache.giraph.graph.GraphMapper.run(GraphMapper.java:101)
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:763)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:369)
    at org.apache.hadoop.mapred.Child$4.run(Child.java:259)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1059)
    at org.apache.hadoop.mapred.Child.main(Child.java:253)
Caused by: java.lang.IllegalStateException: waitFor: ExecutionException
occurred while waiting for
org.apache.giraph.utils.ProgressableUtils$FutureWaitable@315e46fe
    at
org.apache.giraph.utils.ProgressableUtils.waitFor(ProgressableUtils.java:193)
    at
org.apache.giraph.utils.ProgressableUtils.waitForever(ProgressableUtils.java:151)
    at
org.apache.giraph.utils.ProgressableUtils.waitForever(ProgressableUtils.java:136)
    at
org.apache.giraph.utils.ProgressableUtils.getFutureResult(ProgressableUtils.java:99)
    at
org.apache.giraph.utils.ProgressableUtils.getResultsWithNCallables(ProgressableUtils.java:233)
    at
org.apache.giraph.worker.BspServiceWorker.loadInputSplits(BspServiceWorker.java:284)
    at
org.apache.giraph.worker.BspServiceWorker.loadVertices(BspServiceWorker.java:328)
    at
org.apache.giraph.worker.BspServiceWorker.setup(BspServiceWorker.java:509)
    at
org.apache.giraph.graph.GraphTaskManager.execute(GraphTaskManager.java:261)
    at org.apache.giraph.graph.GraphMapper.run(GraphMapper.java:91)
    ... 7 more
Caused by: java.util.concurrent.ExecutionException:
java.lang.ClassCastException: java.lang.NullPointerException cannot be cast
to org.json.JSONException
    at java.util.concurrent.FutureTask.report(FutureTask.java:122)
    at java.util.concurrent.FutureTask.get(FutureTask.java:202)
    at
org.apache.giraph.utils.ProgressableUtils$FutureWaitable.waitFor(ProgressableUtils.java:312)
    at
org.apache.giraph.utils.ProgressableUtils.waitFor(ProgressableUtils.java:185)
    ... 16 more
Caused by: java.lang.ClassCastException: java.lang.NullPointerException
cannot be cast to org.json.JSONException
    at
org.apache.giraph.examples.GenealogyGenerateVertexInputFormat$GenealogyCustomVertexReader.handleException(GenealogyGenerateVertexInputFormat.java:63)
    at
org.apache.giraph.io.formats.TextVertexInputFormat$TextVertexReaderFromEachLineProcessedHandlingExceptions.getCurrentVertex(TextVertexInputFormat.java:338)
    at
org.apache.giraph.io.internal.WrappedVertexReader.getCurrentVertex(WrappedVertexReader.java:89)
    at
org.apache.giraph.worker.VertexInputSplitsCallable.readInputSplit(VertexInputSplitsCallable.java:148)
    at
org.apache.giraph.worker.InputSplitsCallable.loadInputSplit(InputSplitsCallable.java:267)
    at
org.apache.giraph.worker.InputSplitsCallable.call(InputSplitsCallable.java:211)
    at
org.apache.giraph.worker.InputSplitsCallable.call(InputSplitsCallable.java:60)
    at
org.apache.giraph.utils.LogStacktraceCallable.call(LogStacktraceCallable.java:51)
    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
    at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:744)
2014-03-12 23:34:15,810 INFO org.apache.hadoop.mapred.Task: Runnning
cleanup for the task


-- 
Thanks & Regards
Hari Santosh Rongali
Ph:  201-233-8024