Posted to dev@pig.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2012/03/08 11:40:02 UTC

Build failed in Jenkins: Pig-trunk #1202

See <https://builds.apache.org/job/Pig-trunk/1202/changes>

Changes:

[daijy] Fix TestRegisteredJarVisibility on 23

[daijy] PIG-2572: e2e harness deploy fails when using pig that does not bundle hadoop

------------------------------------------
[...truncated 51092 lines...]
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/08 10:35:58 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 2
    [junit] 12/03/08 10:35:58 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/08 10:35:58 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:35969 to delete  blk_4660125593373503779_1095 blk_-5887817452249970228_1101 blk_2883712113113399904_1102
    [junit] 12/03/08 10:35:58 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:48535 to delete  blk_4660125593373503779_1095 blk_-5887817452249970228_1101
    [junit] 12/03/08 10:35:58 INFO ipc.Server: Stopping server on 47879
    [junit] 12/03/08 10:35:58 INFO ipc.Server: IPC Server handler 0 on 47879: exiting
    [junit] 12/03/08 10:35:58 INFO ipc.Server: IPC Server handler 1 on 47879: exiting
    [junit] 12/03/08 10:35:58 INFO ipc.Server: IPC Server handler 2 on 47879: exiting
    [junit] 12/03/08 10:35:58 INFO ipc.Server: Stopping IPC Server listener on 47879
    [junit] 12/03/08 10:35:58 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/08 10:35:58 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/08 10:35:58 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/08 10:35:58 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:35969, storageID=DS-1144620544-67.195.138.20-35969-1331202411802, infoPort=42102, ipcPort=47879):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/08 10:35:58 INFO datanode.DataNode: Exiting DataXceiveServer
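
The AsynchronousCloseException WARN above is a side effect of how the teardown
stops each DataNode: the shutdown path closes the server socket channel while
DataXceiverServer.run() is still blocked in accept(), so the blocked thread
unwinds through AbstractInterruptibleChannel.end() with this exception, which
is caught and logged. A minimal, self-contained sketch of the same java.nio
behaviour (plain JDK, not Hadoop code):

    import java.net.InetSocketAddress;
    import java.nio.channels.ServerSocketChannel;

    public class AsyncCloseSketch {
        public static void main(String[] args) throws Exception {
            final ServerSocketChannel server = ServerSocketChannel.open();
            server.socket().bind(new InetSocketAddress("127.0.0.1", 0));
            Thread acceptor = new Thread(new Runnable() {
                public void run() {
                    try {
                        server.accept();        // blocks, like DataXceiverServer.run()
                    } catch (Exception e) {
                        System.out.println(e);  // java.nio.channels.AsynchronousCloseException
                    }
                }
            });
            acceptor.start();
            Thread.sleep(500);   // give the acceptor time to block in accept()
            server.close();      // closing the channel wakes the acceptor with the exception
            acceptor.join();
        }
    }
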
    [junit] 12/03/08 10:35:59 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/08 10:35:59 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/08 10:35:59 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:35969, storageID=DS-1144620544-67.195.138.20-35969-1331202411802, infoPort=42102, ipcPort=47879):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/08 10:35:59 INFO ipc.Server: Stopping server on 47879
    [junit] 12/03/08 10:35:59 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/08 10:35:59 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/08 10:35:59 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/08 10:35:59 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/08 10:35:59 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-535435478
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-535435478
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] Shutting down DataNode 1
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
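
The InstanceNotFoundException entries are JMX bookkeeping from the same
teardown: FSDataset.shutdown() calls Hadoop's MBeans.unregister() (both frames
are in the trace), but the FSDatasetState bean name it tries to remove is not,
or is no longer, registered with the platform MBean server, so
unregisterMBean() throws; the utility only logs it as a WARN and shutdown
continues. A standalone sketch of the underlying JMX behaviour, using a
made-up bean name and demo classes rather than anything from Hadoop:

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class UnregisterTwiceSketch {
        // Minimal standard MBean pair: class Demo plus interface DemoMBean.
        public interface DemoMBean { int getValue(); }
        public static class Demo implements DemoMBean {
            public int getValue() { return 42; }
        }

        public static void main(String[] args) throws Exception {
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            // Made-up name, loosely modelled on the ones in the log.
            ObjectName name = new ObjectName("Hadoop:service=DataNode,name=FSDatasetState-Demo");
            mbs.registerMBean(new Demo(), name);
            mbs.unregisterMBean(name);   // first removal succeeds
            mbs.unregisterMBean(name);   // second removal throws javax.management.InstanceNotFoundException
        }
    }
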
    [junit] 12/03/08 10:35:59 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/08 10:35:59 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/08 10:36:00 INFO ipc.Server: Stopping server on 49618
    [junit] 12/03/08 10:36:00 INFO ipc.Server: IPC Server handler 0 on 49618: exiting
    [junit] 12/03/08 10:36:00 INFO ipc.Server: IPC Server handler 2 on 49618: exiting
    [junit] 12/03/08 10:36:00 INFO ipc.Server: IPC Server handler 1 on 49618: exiting
    [junit] 12/03/08 10:36:00 INFO ipc.Server: Stopping IPC Server listener on 49618
    [junit] 12/03/08 10:36:00 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/08 10:36:00 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/08 10:36:00 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:36901, storageID=DS-1967685265-67.195.138.20-36901-1331202411463, infoPort=52834, ipcPort=49618):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/08 10:36:00 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/08 10:36:00 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/08 10:36:00 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/08 10:36:00 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:36901, storageID=DS-1967685265-67.195.138.20-36901-1331202411463, infoPort=52834, ipcPort=49618):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/08 10:36:00 INFO ipc.Server: Stopping server on 49618
    [junit] 12/03/08 10:36:00 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/08 10:36:00 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/08 10:36:00 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/08 10:36:00 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/08 10:36:00 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId414943297
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId414943297
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] Shutting down DataNode 0
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/08 10:36:00 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/08 10:36:00 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/08 10:36:00 INFO ipc.Server: Stopping server on 38499
    [junit] 12/03/08 10:36:00 INFO ipc.Server: IPC Server handler 1 on 38499: exiting
    [junit] 12/03/08 10:36:00 INFO ipc.Server: IPC Server handler 0 on 38499: exiting
    [junit] 12/03/08 10:36:00 INFO ipc.Server: IPC Server handler 2 on 38499: exiting
    [junit] 12/03/08 10:36:00 INFO ipc.Server: Stopping IPC Server listener on 38499
    [junit] 12/03/08 10:36:00 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/08 10:36:00 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/08 10:36:00 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/08 10:36:00 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:60542, storageID=DS-1910821457-67.195.138.20-60542-1331202411099, infoPort=34533, ipcPort=38499):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/08 10:36:00 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/08 10:36:00 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/08 10:36:00 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:60542, storageID=DS-1910821457-67.195.138.20-60542-1331202411099, infoPort=34533, ipcPort=38499):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/08 10:36:00 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/08 10:36:00 INFO ipc.Server: Stopping server on 38499
    [junit] 12/03/08 10:36:00 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/08 10:36:00 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/08 10:36:00 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/08 10:36:00 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/08 10:36:01 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/08 10:36:01 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1346918925
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1346918925
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/08 10:36:01 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/08 10:36:01 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/08 10:36:01 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/08 10:36:01 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException.java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/08 10:36:01 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 14Number of transactions batched in Syncs: 160 Number of syncs: 350 SyncTimes(ms): 2866 903 
    [junit] 12/03/08 10:36:01 INFO ipc.Server: Stopping server on 33788
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 0 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 1 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 2 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 3 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 4 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 5 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 6 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 7 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 8 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: IPC Server handler 9 on 33788: exiting
    [junit] 12/03/08 10:36:01 INFO ipc.Server: Stopping IPC Server listener on 33788
    [junit] 12/03/08 10:36:01 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/08 10:36:01 INFO ipc.Server: Stopping IPC Server Responder
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 543.814 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
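
For context: most of the stack traces above funnel through TestStore's
class-level teardown (TestStore.oneTimeTearDown, invoked by JUnit's RunAfters),
which shuts the shared mini DFS and MR clusters down via
MiniGenericCluster.shutDown(). A rough sketch of that lifecycle pattern
follows; the class is hypothetical and the buildCluster() factory is an
assumption for illustration, since only shutDown() actually appears in the
traces:

    import org.apache.pig.test.MiniGenericCluster;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    public class MiniClusterLifecycleSketch {
        private static MiniGenericCluster cluster;

        @BeforeClass
        public static void oneTimeSetUp() throws Exception {
            // Assumed factory method; the real TestStore setup is not in this excerpt.
            cluster = MiniGenericCluster.buildCluster();
        }

        @AfterClass
        public static void oneTimeTearDown() throws Exception {
            // The frame RunAfters invokes in the traces above; it tears down the
            // mini DFS and MR clusters, producing the shutdown WARNs seen here.
            cluster.shutDown();
        }
    }

The shutdown warnings themselves are caught and logged; the 3 failures and 3
errors reported for TestStore are not detailed in the portion of the console
output kept in this excerpt.
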
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/08 10:36:02 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/08 10:36:02 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/08 10:36:02 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/08 10:36:02 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/08 10:36:02 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/08 10:36:02 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/08 10:36:02 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.101 sec
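
The TestStringUDFs warnings above show up because the builtin string UDFs are
being exercised directly, outside a running Pig job, so there appears to be no
PigLogger attached and the warn() path falls back to the "No logger object
provided to UDF" message; the suite still passes (11 tests, 0 failures). A
hypothetical way to reproduce one such warning for SUBSTRING; the class name
and input values below are illustrative, not taken from TestStringUDFs:

    import java.util.Arrays;

    import org.apache.pig.builtin.SUBSTRING;
    import org.apache.pig.data.Tuple;
    import org.apache.pig.data.TupleFactory;

    public class SubstringWarningSketch {
        public static void main(String[] args) throws Exception {
            // (source, beginIndex, endIndex): the negative end index trips the
            // StringIndexOutOfBoundsException that SUBSTRING reports as a WARN.
            Tuple input = TupleFactory.getInstance()
                    .newTuple(Arrays.<Object>asList("hello", 0, -2));
            String result = new SUBSTRING().exec(input);
            // Judging by the clean test run above, the UDF appears to warn and
            // return null rather than propagate the exception.
            System.out.println(result);
        }
    }
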
   [delete] Deleting directory /tmp/pig_junit_tmp1271567918

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:780: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:835: Tests failed!

Total time: 23 minutes 35 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints


Jenkins build is back to normal : Pig-trunk #1218

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1218/changes>


Re: Build failed in Jenkins: Pig-trunk #1217

Posted by Daniel Dai <da...@hortonworks.com>.
builds@apache.org might help. They have fixed it once in a while, but I am
tired of chasing them all the time.

Daniel

On Wed, Mar 28, 2012 at 9:26 AM, Jonathan Coveney <jc...@gmail.com> wrote:
> This failure is beginning to get annoying... what steps do we need to take
> (and I'm willing to take them) in order to rectify this? Does Apache need to
> renew the license? Do we need to turn off Clover in the CI builds?
>
> 2012/3/28 Apache Jenkins Server <je...@builds.apache.org>
>
>> See <https://builds.apache.org/job/Pig-trunk/1217/changes>
>>
>> Changes:
>>
>> [daijy] Fix several e2e tests
>>
>> [jcoveney] [trunk] PIG-2619: HBaseStorage constructs a Scan with
>> cacheBlocks = false
>>
>> ------------------------------------------
>> [...truncated 6506 lines...]
>>  [findbugs]   org.mozilla.javascript.NativeJavaObject
>>  [findbugs]   jline.ConsoleReaderInputStream
>>  [findbugs]   org.apache.log4j.PropertyConfigurator
>>  [findbugs]   org.apache.hadoop.mapred.TaskID
>>  [findbugs]   org.apache.commons.cli.CommandLine
>>  [findbugs]   org.python.core.Py
>>  [findbugs]   org.apache.hadoop.io.BooleanWritable$Comparator
>>  [findbugs]   org.apache.hadoop.io.LongWritable
>>  [findbugs]   org.antlr.runtime.BitSet
>>  [findbugs]   org.apache.hadoop.mapred.jobcontrol.Job
>>  [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter$CompareOp
>>  [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader
>>  [findbugs]   org.mozilla.javascript.NativeFunction
>>  [findbugs]   org.apache.hadoop.mapreduce.Counter
>>  [findbugs]   org.codehaus.jackson.JsonEncoding
>>  [findbugs]   org.codehaus.jackson.JsonParseException
>>  [findbugs]   org.python.core.PyCode
>>  [findbugs]   com.jcraft.jsch.HostKey
>>  [findbugs]   org.apache.hadoop.hbase.filter.Filter
>>  [findbugs]   org.apache.commons.logging.Log
>>  [findbugs]   com.google.common.util.concurrent.ListenableFuture
>>  [findbugs]   org.apache.hadoop.util.RunJar
>>  [findbugs]   org.apache.hadoop.mapred.Counters$Group
>>  [findbugs]   com.jcraft.jsch.ChannelExec
>>  [findbugs]   org.apache.hadoop.hbase.util.Base64
>>  [findbugs]   org.antlr.runtime.TokenStream
>>  [findbugs]   org.apache.hadoop.io.IOUtils
>>  [findbugs]   com.google.common.util.concurrent.CheckedFuture
>>  [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader$Scanner$Entry
>>  [findbugs]   org.apache.hadoop.fs.FSDataInputStream
>>  [findbugs]   org.python.core.PyObject
>>  [findbugs]   jline.History
>>  [findbugs]   org.apache.hadoop.io.BooleanWritable
>>  [findbugs]   org.apache.log4j.Logger
>>  [findbugs]   org.apache.hadoop.hbase.filter.FamilyFilter
>>  [findbugs]   org.antlr.runtime.IntStream
>>  [findbugs]   org.apache.hadoop.util.ReflectionUtils
>>  [findbugs]   org.apache.hadoop.fs.ContentSummary
>>  [findbugs]   org.python.core.PyTuple
>>  [findbugs]   org.apache.hadoop.conf.Configuration
>>  [findbugs]   com.google.common.base.Joiner
>>  [findbugs]   org.apache.hadoop.mapreduce.lib.input.FileSplit
>>  [findbugs]   org.apache.hadoop.mapred.Counters$Counter
>>  [findbugs]   com.jcraft.jsch.Channel
>>  [findbugs]   org.apache.hadoop.mapred.JobPriority
>>  [findbugs]   org.apache.commons.cli.Options
>>  [findbugs]   org.apache.hadoop.mapred.JobID
>>  [findbugs]   org.apache.hadoop.util.bloom.BloomFilter
>>  [findbugs]   org.python.core.PyFrame
>>  [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter
>>  [findbugs]   org.apache.hadoop.util.VersionInfo
>>  [findbugs]   org.python.core.PyString
>>  [findbugs]   org.apache.hadoop.io.Text$Comparator
>>  [findbugs]   org.antlr.runtime.MismatchedSetException
>>  [findbugs]   org.apache.hadoop.io.BytesWritable
>>  [findbugs]   org.apache.hadoop.fs.FsShell
>>  [findbugs]   org.mozilla.javascript.ImporterTopLevel
>>  [findbugs]   org.apache.hadoop.hbase.mapreduce.TableOutputFormat
>>  [findbugs]   org.apache.hadoop.mapred.TaskReport
>>  [findbugs]   org.antlr.runtime.tree.RewriteRuleSubtreeStream
>>  [findbugs]   org.apache.commons.cli.HelpFormatter
>>  [findbugs]   org.mozilla.javascript.NativeObject
>>  [findbugs]   org.apache.hadoop.hbase.HConstants
>>  [findbugs]   org.apache.hadoop.io.serializer.Deserializer
>>  [findbugs]   org.antlr.runtime.FailedPredicateException
>>  [findbugs]   org.apache.hadoop.io.compress.CompressionCodec
>>  [findbugs]   org.apache.hadoop.fs.FileStatus
>>  [findbugs]   org.apache.hadoop.hbase.client.Result
>>  [findbugs]   org.apache.hadoop.mapreduce.JobContext
>>  [findbugs]   org.codehaus.jackson.JsonGenerator
>>  [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptContext
>>  [findbugs]   org.apache.hadoop.io.BytesWritable$Comparator
>>  [findbugs]   org.apache.hadoop.io.LongWritable$Comparator
>>  [findbugs]   org.codehaus.jackson.map.util.LRUMap
>>  [findbugs]   org.apache.hadoop.hbase.util.Bytes
>>  [findbugs]   org.antlr.runtime.MismatchedTokenException
>>  [findbugs]   org.codehaus.jackson.JsonParser
>>  [findbugs]   com.jcraft.jsch.UserInfo
>>  [findbugs]   org.python.core.PyException
>>  [findbugs]   org.apache.commons.cli.ParseException
>>  [findbugs]   org.apache.hadoop.io.compress.CompressionOutputStream
>>  [findbugs]   org.apache.hadoop.hbase.filter.WritableByteArrayComparable
>>  [findbugs]   org.antlr.runtime.tree.CommonTreeNodeStream
>>  [findbugs]   org.apache.log4j.Level
>>  [findbugs]   org.apache.hadoop.hbase.client.Scan
>>  [findbugs]   org.apache.hadoop.mapreduce.Job
>>  [findbugs]   com.google.common.util.concurrent.Futures
>>  [findbugs]   org.apache.commons.logging.LogFactory
>>  [findbugs]   org.apache.commons.codec.binary.Base64
>>  [findbugs]   org.codehaus.jackson.map.ObjectMapper
>>  [findbugs]   org.apache.hadoop.fs.FileSystem
>>  [findbugs]   org.apache.hadoop.hbase.filter.FilterList$Operator
>>  [findbugs]   org.apache.hadoop.hbase.io.ImmutableBytesWritable
>>  [findbugs]   org.apache.hadoop.io.serializer.SerializationFactory
>>  [findbugs]   org.antlr.runtime.tree.TreeAdaptor
>>  [findbugs]   org.apache.hadoop.mapred.RunningJob
>>  [findbugs]   org.antlr.runtime.CommonTokenStream
>>  [findbugs]   org.apache.hadoop.io.DataInputBuffer
>>  [findbugs]   org.apache.hadoop.io.file.tfile.TFile
>>  [findbugs]   org.apache.commons.cli.GnuParser
>>  [findbugs]   org.mozilla.javascript.Context
>>  [findbugs]   org.apache.hadoop.io.FloatWritable
>>  [findbugs]   org.antlr.runtime.tree.RewriteEarlyExitException
>>  [findbugs]   org.apache.hadoop.hbase.HBaseConfiguration
>>  [findbugs]   org.codehaus.jackson.JsonGenerationException
>>  [findbugs]   org.apache.hadoop.mapreduce.TaskInputOutputContext
>>  [findbugs]   org.apache.hadoop.io.compress.GzipCodec
>>  [findbugs]   org.apache.hadoop.mapred.jobcontrol.JobControl
>>  [findbugs]   org.antlr.runtime.BaseRecognizer
>>  [findbugs]   org.apache.hadoop.fs.FileUtil
>>  [findbugs]   org.apache.hadoop.fs.Path
>>  [findbugs]   org.apache.hadoop.hbase.client.Put
>>  [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Writer
>>  [findbugs]   jline.ConsoleReader
>>  [findbugs]   com.google.common.collect.Lists
>>  [findbugs]   org.apache.hadoop.mapreduce.MapContext
>>  [findbugs]   org.python.core.PyJavaPackage
>>  [findbugs]   org.apache.hadoop.hbase.filter.ColumnPrefixFilter
>>  [findbugs]   org.python.core.PyStringMap
>>  [findbugs]   org.apache.hadoop.mapreduce.TaskID
>>  [findbugs]   org.apache.hadoop.hbase.client.HTable
>>  [findbugs]   org.apache.hadoop.io.FloatWritable$Comparator
>>  [findbugs]   org.apache.zookeeper.ZooKeeper
>>  [findbugs]   org.codehaus.jackson.map.JsonMappingException
>>  [findbugs]   org.python.core.PyFunction
>>  [findbugs]   org.antlr.runtime.TokenSource
>>  [findbugs]   com.jcraft.jsch.ChannelDirectTCPIP
>>  [findbugs]   com.jcraft.jsch.JSchException
>>  [findbugs]   org.python.util.PythonInterpreter
>>  [findbugs]   org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
>>  [findbugs]   org.python.core.PyInteger
>>  [findbugs]   org.apache.hadoop.mapred.JobConf
>>  [findbugs]   org.apache.hadoop.util.bloom.Key
>>  [findbugs]   org.apache.hadoop.io.Text
>>  [findbugs]   org.antlr.runtime.NoViableAltException
>>  [findbugs]   org.apache.hadoop.util.GenericOptionsParser
>>  [findbugs]   org.apache.hadoop.mapreduce.JobID
>>  [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptID
>>  [findbugs]   org.apache.hadoop.filecache.DistributedCache
>>  [findbugs]   org.apache.hadoop.fs.FSDataOutputStream
>>  [findbugs]   org.python.core.PyList
>>  [findbugs]   org.antlr.runtime.tree.TreeNodeStream
>>  [findbugs]   org.apache.hadoop.hbase.filter.BinaryComparator
>>  [findbugs]   dk.brics.automaton.RegExp
>>  [findbugs]   org.mozilla.javascript.Scriptable
>>  [findbugs]   org.mozilla.javascript.EcmaError
>>  [findbugs]   org.apache.hadoop.io.serializer.Serializer
>>  [findbugs]   org.apache.hadoop.util.bloom.Filter
>>  [findbugs]   org.python.core.PyNone
>>  [findbugs]   org.mozilla.javascript.Function
>>  [findbugs]   org.python.core.PySystemState
>>  [findbugs]   org.antlr.runtime.RecognizerSharedState
>>  [findbugs]   org.codehaus.jackson.JsonFactory
>>  [findbugs]   org.antlr.runtime.EarlyExitException
>>  [findbugs]   org.apache.hadoop.hdfs.DistributedFileSystem
>>  [findbugs]   org.apache.hadoop.util.LineReader
>>  [findbugs] Warnings generated: 25
>>  [findbugs] Missing classes: 233
>>  [findbugs] Calculating exit code...
>>  [findbugs] Setting 'missing class' flag (2)
>>  [findbugs] Setting 'bugs found' flag (1)
>>  [findbugs] Exit code set to: 3
>>  [findbugs] Java Result: 3
>>  [findbugs] Classes needed for analysis were missing
>>  [findbugs] Output saved to <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml
>> >
>>     [xslt] Processing <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml>
>> to <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.html
>> >
>>     [xslt] Loading stylesheet
>> /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl
>>
>> BUILD SUCCESSFUL
>> Total time: 14 minutes 2 seconds
>>
>>
>> ======================================================================
>> ======================================================================
>> STORE: saving artifacts
>> ======================================================================
>> ======================================================================
>>
>>
>>
>>
>> ======================================================================
>> ======================================================================
>> CLEAN: cleaning workspace
>> ======================================================================
>> ======================================================================
>>
>>
>> Buildfile: build.xml
>>
>> clean:
>>   [delete] Deleting directory <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/src-gen>
>>   [delete] Deleting directory <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/src/docs/build>
>>   [delete] Deleting directory <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/build>
>>   [delete] Deleting directory <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/test/org/apache/pig/test/utils/dotGraph/parser
>> >
>>   [delete] Deleting: <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/pig.jar>
>>   [delete] Deleting: <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/pig-withouthadoop.jar>
>>
>> clean:
>>
>> clean:
>>
>> BUILD SUCCESSFUL
>> Total time: 0 seconds
>>
>>
>> ======================================================================
>> ======================================================================
>> ANALYSIS: ant -Drun.clover=true
>> -Dclover.home=/homes/hudson/tools/clover/latest clover test-commit
>> generate-clover-reports -Dtest.junit.output.format=xml -Dtest.output=yes
>> -Dversion=${BUILD_ID} -Dfindbugs.home=$FINDBUGS_HOME
>> -Djava5.home=$JAVA5_HOME -Dforrest.home=$FORREST_HOME
>> -Dclover.home=$CLOVER_HOME -Declipse.home=$ECLIPSE_HOME
>> ======================================================================
>> ======================================================================
>>
>>
>> Buildfile: build.xml
>>
>> clover.setup:
>>    [mkdir] Created dir: <
>> https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/clover/db>
>> [clover-setup] Clover Version 3.1.0, built on May 31 2011 (build-821)
>> [clover-setup] Loaded from:
>> /home/jenkins/tools/clover/latest/lib/clover.jar
>>
>> BUILD FAILED
>> java.lang.RuntimeException: Clover upgrades for your license ended
>> December 14 2010, and this version of Clover was built May 31 2011. Please
>> visit http://www.atlassian.com/clover/renew for information on upgrading
>> your license.
>>        at
>> com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:103)
>>        at
>> com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:25)
>>        at
>> com.cenqua.clover.tasks.AbstractCloverTask.execute(AbstractCloverTask.java:52)
>>        at
>> org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:288)
>>        at sun.reflect.GeneratedMethodAccessor1.invoke(Unknown Source)
>>        at
>> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>>        at java.lang.reflect.Method.invoke(Method.java:597)
>>        at
>> org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
>>        at org.apache.tools.ant.Task.perform(Task.java:348)
>>        at org.apache.tools.ant.Target.execute(Target.java:357)
>>        at org.apache.tools.ant.Target.performTasks(Target.java:385)
>>        at
>> org.apache.tools.ant.Project.executeSortedTargets(Project.java:1337)
>>        at org.apache.tools.ant.Project.executeTarget(Project.java:1306)
>>        at
>> org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
>>        at org.apache.tools.ant.Project.executeTargets(Project.java:1189)
>>        at org.apache.tools.ant.Main.runBuild(Main.java:758)
>>        at org.apache.tools.ant.Main.startAnt(Main.java:217)
>>        at org.apache.tools.ant.launch.Launcher.run(Launcher.java:257)
>>        at org.apache.tools.ant.launch.Launcher.main(Launcher.java:104)
>>
>> Total time: 1 second
>> Build step 'Execute shell' marked build as failure
>> [FINDBUGS] Skipping publisher since build result is FAILURE
>> Recording test results
>> Publishing Javadoc
>> Archiving artifacts
>> Recording fingerprints
>>

Re: Build failed in Jenkins: Pig-trunk #1217

Posted by Jonathan Coveney <jc...@gmail.com>.
This failure is beginning to get annoying... what steps do we need to take
(and I'm willing to take them) in order to rectify this? Does Apache need to
renew the license? Do we need to turn off Clover in the CI builds?


Build failed in Jenkins: Pig-trunk #1217

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1217/changes>

Changes:

[daijy] Fix several e2e tests

[jcoveney] [trunk] PIG-2619: HBaseStorage constructs a Scan with cacheBlocks = false

------------------------------------------
[...truncated 6506 lines...]
 [findbugs]   org.mozilla.javascript.NativeJavaObject
 [findbugs]   jline.ConsoleReaderInputStream
 [findbugs]   org.apache.log4j.PropertyConfigurator
 [findbugs]   org.apache.hadoop.mapred.TaskID
 [findbugs]   org.apache.commons.cli.CommandLine
 [findbugs]   org.python.core.Py
 [findbugs]   org.apache.hadoop.io.BooleanWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable
 [findbugs]   org.antlr.runtime.BitSet
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.Job
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter$CompareOp
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader
 [findbugs]   org.mozilla.javascript.NativeFunction
 [findbugs]   org.apache.hadoop.mapreduce.Counter
 [findbugs]   org.codehaus.jackson.JsonEncoding
 [findbugs]   org.codehaus.jackson.JsonParseException
 [findbugs]   org.python.core.PyCode
 [findbugs]   com.jcraft.jsch.HostKey
 [findbugs]   org.apache.hadoop.hbase.filter.Filter
 [findbugs]   org.apache.commons.logging.Log
 [findbugs]   com.google.common.util.concurrent.ListenableFuture
 [findbugs]   org.apache.hadoop.util.RunJar
 [findbugs]   org.apache.hadoop.mapred.Counters$Group
 [findbugs]   com.jcraft.jsch.ChannelExec
 [findbugs]   org.apache.hadoop.hbase.util.Base64
 [findbugs]   org.antlr.runtime.TokenStream
 [findbugs]   org.apache.hadoop.io.IOUtils
 [findbugs]   com.google.common.util.concurrent.CheckedFuture
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader$Scanner$Entry
 [findbugs]   org.apache.hadoop.fs.FSDataInputStream
 [findbugs]   org.python.core.PyObject
 [findbugs]   jline.History
 [findbugs]   org.apache.hadoop.io.BooleanWritable
 [findbugs]   org.apache.log4j.Logger
 [findbugs]   org.apache.hadoop.hbase.filter.FamilyFilter
 [findbugs]   org.antlr.runtime.IntStream
 [findbugs]   org.apache.hadoop.util.ReflectionUtils
 [findbugs]   org.apache.hadoop.fs.ContentSummary
 [findbugs]   org.python.core.PyTuple
 [findbugs]   org.apache.hadoop.conf.Configuration
 [findbugs]   com.google.common.base.Joiner
 [findbugs]   org.apache.hadoop.mapreduce.lib.input.FileSplit
 [findbugs]   org.apache.hadoop.mapred.Counters$Counter
 [findbugs]   com.jcraft.jsch.Channel
 [findbugs]   org.apache.hadoop.mapred.JobPriority
 [findbugs]   org.apache.commons.cli.Options
 [findbugs]   org.apache.hadoop.mapred.JobID
 [findbugs]   org.apache.hadoop.util.bloom.BloomFilter
 [findbugs]   org.python.core.PyFrame
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter
 [findbugs]   org.apache.hadoop.util.VersionInfo
 [findbugs]   org.python.core.PyString
 [findbugs]   org.apache.hadoop.io.Text$Comparator
 [findbugs]   org.antlr.runtime.MismatchedSetException
 [findbugs]   org.apache.hadoop.io.BytesWritable
 [findbugs]   org.apache.hadoop.fs.FsShell
 [findbugs]   org.mozilla.javascript.ImporterTopLevel
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableOutputFormat
 [findbugs]   org.apache.hadoop.mapred.TaskReport
 [findbugs]   org.antlr.runtime.tree.RewriteRuleSubtreeStream
 [findbugs]   org.apache.commons.cli.HelpFormatter
 [findbugs]   org.mozilla.javascript.NativeObject
 [findbugs]   org.apache.hadoop.hbase.HConstants
 [findbugs]   org.apache.hadoop.io.serializer.Deserializer
 [findbugs]   org.antlr.runtime.FailedPredicateException
 [findbugs]   org.apache.hadoop.io.compress.CompressionCodec
 [findbugs]   org.apache.hadoop.fs.FileStatus
 [findbugs]   org.apache.hadoop.hbase.client.Result
 [findbugs]   org.apache.hadoop.mapreduce.JobContext
 [findbugs]   org.codehaus.jackson.JsonGenerator
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptContext
 [findbugs]   org.apache.hadoop.io.BytesWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable$Comparator
 [findbugs]   org.codehaus.jackson.map.util.LRUMap
 [findbugs]   org.apache.hadoop.hbase.util.Bytes
 [findbugs]   org.antlr.runtime.MismatchedTokenException
 [findbugs]   org.codehaus.jackson.JsonParser
 [findbugs]   com.jcraft.jsch.UserInfo
 [findbugs]   org.python.core.PyException
 [findbugs]   org.apache.commons.cli.ParseException
 [findbugs]   org.apache.hadoop.io.compress.CompressionOutputStream
 [findbugs]   org.apache.hadoop.hbase.filter.WritableByteArrayComparable
 [findbugs]   org.antlr.runtime.tree.CommonTreeNodeStream
 [findbugs]   org.apache.log4j.Level
 [findbugs]   org.apache.hadoop.hbase.client.Scan
 [findbugs]   org.apache.hadoop.mapreduce.Job
 [findbugs]   com.google.common.util.concurrent.Futures
 [findbugs]   org.apache.commons.logging.LogFactory
 [findbugs]   org.apache.commons.codec.binary.Base64
 [findbugs]   org.codehaus.jackson.map.ObjectMapper
 [findbugs]   org.apache.hadoop.fs.FileSystem
 [findbugs]   org.apache.hadoop.hbase.filter.FilterList$Operator
 [findbugs]   org.apache.hadoop.hbase.io.ImmutableBytesWritable
 [findbugs]   org.apache.hadoop.io.serializer.SerializationFactory
 [findbugs]   org.antlr.runtime.tree.TreeAdaptor
 [findbugs]   org.apache.hadoop.mapred.RunningJob
 [findbugs]   org.antlr.runtime.CommonTokenStream
 [findbugs]   org.apache.hadoop.io.DataInputBuffer
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile
 [findbugs]   org.apache.commons.cli.GnuParser
 [findbugs]   org.mozilla.javascript.Context
 [findbugs]   org.apache.hadoop.io.FloatWritable
 [findbugs]   org.antlr.runtime.tree.RewriteEarlyExitException
 [findbugs]   org.apache.hadoop.hbase.HBaseConfiguration
 [findbugs]   org.codehaus.jackson.JsonGenerationException
 [findbugs]   org.apache.hadoop.mapreduce.TaskInputOutputContext
 [findbugs]   org.apache.hadoop.io.compress.GzipCodec
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.JobControl
 [findbugs]   org.antlr.runtime.BaseRecognizer
 [findbugs]   org.apache.hadoop.fs.FileUtil
 [findbugs]   org.apache.hadoop.fs.Path
 [findbugs]   org.apache.hadoop.hbase.client.Put
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Writer
 [findbugs]   jline.ConsoleReader
 [findbugs]   com.google.common.collect.Lists
 [findbugs]   org.apache.hadoop.mapreduce.MapContext
 [findbugs]   org.python.core.PyJavaPackage
 [findbugs]   org.apache.hadoop.hbase.filter.ColumnPrefixFilter
 [findbugs]   org.python.core.PyStringMap
 [findbugs]   org.apache.hadoop.mapreduce.TaskID
 [findbugs]   org.apache.hadoop.hbase.client.HTable
 [findbugs]   org.apache.hadoop.io.FloatWritable$Comparator
 [findbugs]   org.apache.zookeeper.ZooKeeper
 [findbugs]   org.codehaus.jackson.map.JsonMappingException
 [findbugs]   org.python.core.PyFunction
 [findbugs]   org.antlr.runtime.TokenSource
 [findbugs]   com.jcraft.jsch.ChannelDirectTCPIP
 [findbugs]   com.jcraft.jsch.JSchException
 [findbugs]   org.python.util.PythonInterpreter
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
 [findbugs]   org.python.core.PyInteger
 [findbugs]   org.apache.hadoop.mapred.JobConf
 [findbugs]   org.apache.hadoop.util.bloom.Key
 [findbugs]   org.apache.hadoop.io.Text
 [findbugs]   org.antlr.runtime.NoViableAltException
 [findbugs]   org.apache.hadoop.util.GenericOptionsParser
 [findbugs]   org.apache.hadoop.mapreduce.JobID
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptID
 [findbugs]   org.apache.hadoop.filecache.DistributedCache
 [findbugs]   org.apache.hadoop.fs.FSDataOutputStream
 [findbugs]   org.python.core.PyList
 [findbugs]   org.antlr.runtime.tree.TreeNodeStream
 [findbugs]   org.apache.hadoop.hbase.filter.BinaryComparator
 [findbugs]   dk.brics.automaton.RegExp
 [findbugs]   org.mozilla.javascript.Scriptable
 [findbugs]   org.mozilla.javascript.EcmaError
 [findbugs]   org.apache.hadoop.io.serializer.Serializer
 [findbugs]   org.apache.hadoop.util.bloom.Filter
 [findbugs]   org.python.core.PyNone
 [findbugs]   org.mozilla.javascript.Function
 [findbugs]   org.python.core.PySystemState
 [findbugs]   org.antlr.runtime.RecognizerSharedState
 [findbugs]   org.codehaus.jackson.JsonFactory
 [findbugs]   org.antlr.runtime.EarlyExitException
 [findbugs]   org.apache.hadoop.hdfs.DistributedFileSystem
 [findbugs]   org.apache.hadoop.util.LineReader
 [findbugs] Warnings generated: 25
 [findbugs] Missing classes: 233
 [findbugs] Calculating exit code...
 [findbugs] Setting 'missing class' flag (2)
 [findbugs] Setting 'bugs found' flag (1)
 [findbugs] Exit code set to: 3
 [findbugs] Java Result: 3
 [findbugs] Classes needed for analysis were missing
 [findbugs] Output saved to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml>
     [xslt] Processing <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml> to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.html>
     [xslt] Loading stylesheet /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl
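
The FindBugs numbers above explain the odd-looking "Java Result: 3": the step found 25 warnings and could not resolve 233 referenced classes, so it sets the 'bugs found' flag (1) and the 'missing class' flag (2) and combines them. A minimal sketch of that bitmask, written only as an illustration of the flags printed in this log (it is not the FindBugs source):

    public class FindbugsExitCodeSketch {
        // Flag values as printed by the step above.
        static final int BUGS_FOUND_FLAG = 1;     // "Setting 'bugs found' flag (1)"
        static final int MISSING_CLASS_FLAG = 2;  // "Setting 'missing class' flag (2)"

        static int exitCode(boolean bugsFound, boolean missingClasses) {
            int code = 0;
            if (bugsFound)      code |= BUGS_FOUND_FLAG;
            if (missingClasses) code |= MISSING_CLASS_FLAG;
            return code;
        }

        public static void main(String[] args) {
            // 25 warnings and 233 missing classes in this run: 1 | 2 = 3.
            System.out.println(exitCode(true, true));
        }
    }

Note that the enclosing ant run still ends in BUILD SUCCESSFUL below; the non-zero FindBugs result is only recorded, not treated as a build failure here.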

BUILD SUCCESSFUL
Total time: 14 minutes 2 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================




======================================================================
======================================================================
CLEAN: cleaning workspace
======================================================================
======================================================================


Buildfile: build.xml

clean:
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src-gen>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src/docs/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/test/org/apache/pig/test/utils/dotGraph/parser>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig.jar>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig-withouthadoop.jar>

clean:

clean:

BUILD SUCCESSFUL
Total time: 0 seconds


======================================================================
======================================================================
ANALYSIS: ant -Drun.clover=true -Dclover.home=/homes/hudson/tools/clover/latest clover test-commit generate-clover-reports -Dtest.junit.output.format=xml -Dtest.output=yes -Dversion=${BUILD_ID} -Dfindbugs.home=$FINDBUGS_HOME -Djava5.home=$JAVA5_HOME -Dforrest.home=$FORREST_HOME -Dclover.home=$CLOVER_HOME -Declipse.home=$ECLIPSE_HOME
======================================================================
======================================================================


Buildfile: build.xml

clover.setup:
    [mkdir] Created dir: <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/clover/db>
[clover-setup] Clover Version 3.1.0, built on May 31 2011 (build-821)
[clover-setup] Loaded from: /home/jenkins/tools/clover/latest/lib/clover.jar

BUILD FAILED
java.lang.RuntimeException: Clover upgrades for your license ended December 14 2010, and this version of Clover was built May 31 2011. Please visit http://www.atlassian.com/clover/renew for information on upgrading your license.
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:103)
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:25)
	at com.cenqua.clover.tasks.AbstractCloverTask.execute(AbstractCloverTask.java:52)
	at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:288)
	at sun.reflect.GeneratedMethodAccessor1.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
	at org.apache.tools.ant.Task.perform(Task.java:348)
	at org.apache.tools.ant.Target.execute(Target.java:357)
	at org.apache.tools.ant.Target.performTasks(Target.java:385)
	at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1337)
	at org.apache.tools.ant.Project.executeTarget(Project.java:1306)
	at org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
	at org.apache.tools.ant.Project.executeTargets(Project.java:1189)
	at org.apache.tools.ant.Main.runBuild(Main.java:758)
	at org.apache.tools.ant.Main.startAnt(Main.java:217)
	at org.apache.tools.ant.launch.Launcher.run(Launcher.java:257)
	at org.apache.tools.ant.launch.Launcher.main(Launcher.java:104)

Total time: 1 second
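
The Clover failure above is a license-versus-build-date problem rather than anything in the Pig changes: the license's upgrade entitlement ended 2010-12-14, while the Clover 3.1.0 jar under /home/jenkins/tools/clover/latest was built 2011-05-31. A rough sketch of the rule the message describes, purely as an illustration (this is not Atlassian's licensing code):

    import java.time.LocalDate;

    public class CloverLicenseSketch {
        public static void main(String[] args) {
            LocalDate upgradesEnded = LocalDate.of(2010, 12, 14); // from the license
            LocalDate cloverBuilt   = LocalDate.of(2011, 5, 31);  // Clover 3.1.0 build date
            if (cloverBuilt.isAfter(upgradesEnded)) {
                throw new RuntimeException("Clover upgrades for your license ended "
                        + upgradesEnded + ", and this version of Clover was built " + cloverBuilt);
            }
        }
    }

The identical failure appears again in the #1215 log further down; nothing in the changes being tested affects it.
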
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1216

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1216/changes>

Changes:

[jcoveney] PIG-2540 piggybank trunk AvroStorage can't read schema on s3 in e/r mode

[jcoveney] [PIG-2618] e2e fails to build

------------------------------------------
[...truncated 53336 lines...]
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/27 10:36:42 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/27 10:36:42 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/27 10:36:42 INFO ipc.Server: Stopping server on 39951
    [junit] 12/03/27 10:36:42 INFO ipc.Server: IPC Server handler 0 on 39951: exiting
    [junit] 12/03/27 10:36:42 INFO ipc.Server: IPC Server handler 1 on 39951: exiting
    [junit] 12/03/27 10:36:42 INFO ipc.Server: IPC Server handler 2 on 39951: exiting
    [junit] 12/03/27 10:36:42 INFO ipc.Server: Stopping IPC Server listener on 39951
    [junit] 12/03/27 10:36:42 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/27 10:36:42 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/27 10:36:42 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/27 10:36:42 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:48070, storageID=DS-1560670394-67.195.138.20-48070-1332844056038, infoPort=38899, ipcPort=39951):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/27 10:36:42 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/27 10:36:42 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/27 10:36:43 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:52395 to delete  blk_5471282960740557369_1102 blk_15802032107254893_1102
    [junit] 12/03/27 10:36:43 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:50381 to delete  blk_-4893342285719632994_1095 blk_8200084384064892388_1101 blk_15802032107254893_1102
    [junit] 12/03/27 10:36:43 INFO mapred.TaskTracker: Received 'KillJobAction' for job: job_20120327102736432_0012
    [junit] 12/03/27 10:36:43 WARN mapred.TaskTracker: Unknown job job_20120327102736432_0012 being deleted.
    [junit] 12/03/27 10:36:43 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/27 10:36:43 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:48070, storageID=DS-1560670394-67.195.138.20-48070-1332844056038, infoPort=38899, ipcPort=39951):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/27 10:36:43 INFO ipc.Server: Stopping server on 39951
    [junit] 12/03/27 10:36:43 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/27 10:36:43 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/27 10:36:43 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/27 10:36:43 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/27 10:36:43 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1965725253
    [junit] Shutting down DataNode 1
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1965725253
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/27 10:36:43 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/27 10:36:43 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/27 10:36:44 INFO ipc.Server: Stopping server on 59830
    [junit] 12/03/27 10:36:44 INFO ipc.Server: IPC Server handler 0 on 59830: exiting
    [junit] 12/03/27 10:36:44 INFO ipc.Server: IPC Server handler 1 on 59830: exiting
    [junit] 12/03/27 10:36:44 INFO ipc.Server: IPC Server handler 2 on 59830: exiting
    [junit] 12/03/27 10:36:44 INFO ipc.Server: Stopping IPC Server listener on 59830
    [junit] 12/03/27 10:36:44 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/27 10:36:44 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/27 10:36:44 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/27 10:36:44 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:46589, storageID=DS-1589661793-67.195.138.20-46589-1332844055694, infoPort=49266, ipcPort=59830):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/27 10:36:44 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/27 10:36:44 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:46589, storageID=DS-1589661793-67.195.138.20-46589-1332844055694, infoPort=49266, ipcPort=59830):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/27 10:36:45 INFO ipc.Server: Stopping server on 59830
    [junit] 12/03/27 10:36:45 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/27 10:36:45 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/27 10:36:45 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] Shutting down DataNode 0
    [junit] 12/03/27 10:36:45 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1574283536
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1574283536
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/27 10:36:45 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/27 10:36:45 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Scheduling block blk_-4893342285719632994_1095 file build/test/data/dfs/data/data1/current/blk_-4893342285719632994 for deletion
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Scheduling block blk_15802032107254893_1102 file build/test/data/dfs/data/data1/current/blk_15802032107254893 for deletion
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Scheduling block blk_8200084384064892388_1101 file build/test/data/dfs/data/data2/current/blk_8200084384064892388 for deletion
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Deleted block blk_-4893342285719632994_1095 at file build/test/data/dfs/data/data1/current/blk_-4893342285719632994
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Deleted block blk_8200084384064892388_1101 at file build/test/data/dfs/data/data2/current/blk_8200084384064892388
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Deleted block blk_15802032107254893_1102 at file build/test/data/dfs/data/data1/current/blk_15802032107254893
    [junit] 12/03/27 10:36:45 INFO ipc.Server: Stopping server on 43609
    [junit] 12/03/27 10:36:45 INFO ipc.Server: IPC Server handler 0 on 43609: exiting
    [junit] 12/03/27 10:36:45 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/27 10:36:45 INFO ipc.Server: Stopping IPC Server listener on 43609
    [junit] 12/03/27 10:36:45 INFO ipc.Server: IPC Server handler 2 on 43609: exiting
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/27 10:36:45 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:50381, storageID=DS-19095612-67.195.138.20-50381-1332844055325, infoPort=49614, ipcPort=43609):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/27 10:36:45 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/27 10:36:45 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/27 10:36:45 INFO ipc.Server: IPC Server handler 1 on 43609: exiting
    [junit] 12/03/27 10:36:45 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/27 10:36:46 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/27 10:36:46 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:50381, storageID=DS-19095612-67.195.138.20-50381-1332844055325, infoPort=49614, ipcPort=43609):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/27 10:36:46 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/27 10:36:46 INFO ipc.Server: Stopping server on 43609
    [junit] 12/03/27 10:36:46 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/27 10:36:46 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/27 10:36:46 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/27 10:36:46 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/27 10:36:46 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId711748214
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId711748214
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/27 10:36:46 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/27 10:36:46 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/27 10:36:46 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:48070 to delete  blk_5471282960740557369_1102 blk_-4893342285719632994_1095 blk_8200084384064892388_1101
    [junit] 12/03/27 10:36:46 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:46589 to delete  blk_5471282960740557369_1102 blk_-4893342285719632994_1095 blk_8200084384064892388_1101 blk_15802032107254893_1102
    [junit] 12/03/27 10:36:46 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException. java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/27 10:36:46 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 16 Number of transactions batched in Syncs: 156 Number of syncs: 348 SyncTimes(ms): 7132 630 
    [junit] 12/03/27 10:36:46 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/27 10:36:46 INFO ipc.Server: Stopping server on 54579
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 0 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 1 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 2 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 4 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 3 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 6 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 7 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 5 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 9 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: IPC Server handler 8 on 54579: exiting
    [junit] 12/03/27 10:36:46 INFO ipc.Server: Stopping IPC Server listener on 54579
    [junit] 12/03/27 10:36:46 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/27 10:36:46 INFO ipc.Server: Stopping IPC Server Responder
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 544.399 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
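
The javax.management.InstanceNotFoundException stack traces that dominate the teardown log above are warnings, not the test failures themselves: MiniDFSCluster.shutdown() unregisters each DataNode's MBeans, the named bean is no longer (or was never) registered, and Hadoop's MBeans.unregister logs the exception at WARN and carries on. The 3 failures and 3 errors reported for TestStore come from the test cases, not from this teardown noise. A small sketch of that catch-and-warn pattern, using hypothetical names and only the standard JMX API (it is not the org.apache.hadoop.metrics2.util.MBeans implementation):

    import java.lang.management.ManagementFactory;
    import javax.management.InstanceNotFoundException;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    // Hypothetical helper illustrating the catch-and-warn pattern seen above.
    public final class QuietUnregister {
        private QuietUnregister() {}

        public static void unregister(ObjectName name) {
            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
            try {
                server.unregisterMBean(name);
            } catch (InstanceNotFoundException alreadyGone) {
                // Same outcome as the log: a WARN with the bean name, then shutdown continues.
                System.err.println("WARN util.MBeans: " + name + " (" + alreadyGone + ")");
            } catch (Exception e) {
                System.err.println("WARN util.MBeans: failed to unregister " + name + ": " + e);
            }
        }
    }
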
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/27 10:36:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/27 10:36:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/27 10:36:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/27 10:36:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/27 10:36:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/27 10:36:47 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/27 10:36:47 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.107 sec
   [delete] Deleting directory /tmp/pig_junit_tmp382831045
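
The TestStringUDFs run above passes (11 tests, 0 failures); the WARN lines are the expected behaviour of Pig's built-in string UDFs when fed out-of-range offsets: the UDF calls EvalFunc.warn(...) and returns null instead of failing the task, and because these unit tests attach no PigLogger, the warning falls back to log4j with the "No logger object provided to UDF" prefix. A hypothetical UDF showing that warn-and-return-null pattern (a sketch, not the org.apache.pig.builtin.SUBSTRING source):

    import java.io.IOException;
    import org.apache.pig.EvalFunc;
    import org.apache.pig.PigWarning;
    import org.apache.pig.data.Tuple;

    // Hypothetical UDF: substring(str, begin, end) with the same defensive behaviour.
    public class SafeSubstring extends EvalFunc<String> {
        @Override
        public String exec(Tuple input) throws IOException {
            if (input == null || input.size() != 3 || input.get(0) == null) {
                return null;
            }
            try {
                String str = (String) input.get(0);
                int begin  = (Integer) input.get(1);
                int end    = (Integer) input.get(2);
                return str.substring(begin, end);   // may throw StringIndexOutOfBoundsException
            } catch (Exception e) {
                // Without a PigLogger this surfaces in the log as:
                // "No logger object provided to UDF: <class>. <message>"
                warn("Failed to process input; error - " + e.getMessage(), PigWarning.UDF_WARNING_1);
                return null;
            }
        }
    }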

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:781: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:836: Tests failed!

Total time: 24 minutes 1 second
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1215

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1215/changes>

Changes:

[daijy] Fix several e2e test failures

------------------------------------------
[...truncated 6487 lines...]
 [findbugs]   org.mozilla.javascript.NativeJavaObject
 [findbugs]   jline.ConsoleReaderInputStream
 [findbugs]   org.apache.log4j.PropertyConfigurator
 [findbugs]   org.apache.hadoop.mapred.TaskID
 [findbugs]   org.apache.commons.cli.CommandLine
 [findbugs]   org.python.core.Py
 [findbugs]   org.apache.hadoop.io.BooleanWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable
 [findbugs]   org.antlr.runtime.BitSet
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.Job
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter$CompareOp
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader
 [findbugs]   org.mozilla.javascript.NativeFunction
 [findbugs]   org.apache.hadoop.mapreduce.Counter
 [findbugs]   org.codehaus.jackson.JsonEncoding
 [findbugs]   org.codehaus.jackson.JsonParseException
 [findbugs]   org.python.core.PyCode
 [findbugs]   com.jcraft.jsch.HostKey
 [findbugs]   org.apache.hadoop.hbase.filter.Filter
 [findbugs]   org.apache.commons.logging.Log
 [findbugs]   com.google.common.util.concurrent.ListenableFuture
 [findbugs]   org.apache.hadoop.util.RunJar
 [findbugs]   org.apache.hadoop.mapred.Counters$Group
 [findbugs]   com.jcraft.jsch.ChannelExec
 [findbugs]   org.apache.hadoop.hbase.util.Base64
 [findbugs]   org.antlr.runtime.TokenStream
 [findbugs]   org.apache.hadoop.io.IOUtils
 [findbugs]   com.google.common.util.concurrent.CheckedFuture
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader$Scanner$Entry
 [findbugs]   org.apache.hadoop.fs.FSDataInputStream
 [findbugs]   org.python.core.PyObject
 [findbugs]   jline.History
 [findbugs]   org.apache.hadoop.io.BooleanWritable
 [findbugs]   org.apache.log4j.Logger
 [findbugs]   org.apache.hadoop.hbase.filter.FamilyFilter
 [findbugs]   org.antlr.runtime.IntStream
 [findbugs]   org.apache.hadoop.util.ReflectionUtils
 [findbugs]   org.apache.hadoop.fs.ContentSummary
 [findbugs]   org.python.core.PyTuple
 [findbugs]   org.apache.hadoop.conf.Configuration
 [findbugs]   com.google.common.base.Joiner
 [findbugs]   org.apache.hadoop.mapreduce.lib.input.FileSplit
 [findbugs]   org.apache.hadoop.mapred.Counters$Counter
 [findbugs]   com.jcraft.jsch.Channel
 [findbugs]   org.apache.hadoop.mapred.JobPriority
 [findbugs]   org.apache.commons.cli.Options
 [findbugs]   org.apache.hadoop.mapred.JobID
 [findbugs]   org.apache.hadoop.util.bloom.BloomFilter
 [findbugs]   org.python.core.PyFrame
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter
 [findbugs]   org.apache.hadoop.util.VersionInfo
 [findbugs]   org.python.core.PyString
 [findbugs]   org.apache.hadoop.io.Text$Comparator
 [findbugs]   org.antlr.runtime.MismatchedSetException
 [findbugs]   org.apache.hadoop.io.BytesWritable
 [findbugs]   org.apache.hadoop.fs.FsShell
 [findbugs]   org.mozilla.javascript.ImporterTopLevel
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableOutputFormat
 [findbugs]   org.apache.hadoop.mapred.TaskReport
 [findbugs]   org.antlr.runtime.tree.RewriteRuleSubtreeStream
 [findbugs]   org.apache.commons.cli.HelpFormatter
 [findbugs]   org.mozilla.javascript.NativeObject
 [findbugs]   org.apache.hadoop.hbase.HConstants
 [findbugs]   org.apache.hadoop.io.serializer.Deserializer
 [findbugs]   org.antlr.runtime.FailedPredicateException
 [findbugs]   org.apache.hadoop.io.compress.CompressionCodec
 [findbugs]   org.apache.hadoop.fs.FileStatus
 [findbugs]   org.apache.hadoop.hbase.client.Result
 [findbugs]   org.apache.hadoop.mapreduce.JobContext
 [findbugs]   org.codehaus.jackson.JsonGenerator
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptContext
 [findbugs]   org.apache.hadoop.io.BytesWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable$Comparator
 [findbugs]   org.codehaus.jackson.map.util.LRUMap
 [findbugs]   org.apache.hadoop.hbase.util.Bytes
 [findbugs]   org.antlr.runtime.MismatchedTokenException
 [findbugs]   org.codehaus.jackson.JsonParser
 [findbugs]   com.jcraft.jsch.UserInfo
 [findbugs]   org.python.core.PyException
 [findbugs]   org.apache.commons.cli.ParseException
 [findbugs]   org.apache.hadoop.io.compress.CompressionOutputStream
 [findbugs]   org.apache.hadoop.hbase.filter.WritableByteArrayComparable
 [findbugs]   org.antlr.runtime.tree.CommonTreeNodeStream
 [findbugs]   org.apache.log4j.Level
 [findbugs]   org.apache.hadoop.hbase.client.Scan
 [findbugs]   org.apache.hadoop.mapreduce.Job
 [findbugs]   com.google.common.util.concurrent.Futures
 [findbugs]   org.apache.commons.logging.LogFactory
 [findbugs]   org.apache.commons.codec.binary.Base64
 [findbugs]   org.codehaus.jackson.map.ObjectMapper
 [findbugs]   org.apache.hadoop.fs.FileSystem
 [findbugs]   org.apache.hadoop.hbase.filter.FilterList$Operator
 [findbugs]   org.apache.hadoop.hbase.io.ImmutableBytesWritable
 [findbugs]   org.apache.hadoop.io.serializer.SerializationFactory
 [findbugs]   org.antlr.runtime.tree.TreeAdaptor
 [findbugs]   org.apache.hadoop.mapred.RunningJob
 [findbugs]   org.antlr.runtime.CommonTokenStream
 [findbugs]   org.apache.hadoop.io.DataInputBuffer
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile
 [findbugs]   org.apache.commons.cli.GnuParser
 [findbugs]   org.mozilla.javascript.Context
 [findbugs]   org.apache.hadoop.io.FloatWritable
 [findbugs]   org.antlr.runtime.tree.RewriteEarlyExitException
 [findbugs]   org.apache.hadoop.hbase.HBaseConfiguration
 [findbugs]   org.codehaus.jackson.JsonGenerationException
 [findbugs]   org.apache.hadoop.mapreduce.TaskInputOutputContext
 [findbugs]   org.apache.hadoop.io.compress.GzipCodec
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.JobControl
 [findbugs]   org.antlr.runtime.BaseRecognizer
 [findbugs]   org.apache.hadoop.fs.FileUtil
 [findbugs]   org.apache.hadoop.fs.Path
 [findbugs]   org.apache.hadoop.hbase.client.Put
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Writer
 [findbugs]   jline.ConsoleReader
 [findbugs]   com.google.common.collect.Lists
 [findbugs]   org.apache.hadoop.mapreduce.MapContext
 [findbugs]   org.python.core.PyJavaPackage
 [findbugs]   org.apache.hadoop.hbase.filter.ColumnPrefixFilter
 [findbugs]   org.python.core.PyStringMap
 [findbugs]   org.apache.hadoop.mapreduce.TaskID
 [findbugs]   org.apache.hadoop.hbase.client.HTable
 [findbugs]   org.apache.hadoop.io.FloatWritable$Comparator
 [findbugs]   org.apache.zookeeper.ZooKeeper
 [findbugs]   org.codehaus.jackson.map.JsonMappingException
 [findbugs]   org.python.core.PyFunction
 [findbugs]   org.antlr.runtime.TokenSource
 [findbugs]   com.jcraft.jsch.ChannelDirectTCPIP
 [findbugs]   com.jcraft.jsch.JSchException
 [findbugs]   org.python.util.PythonInterpreter
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
 [findbugs]   org.python.core.PyInteger
 [findbugs]   org.apache.hadoop.mapred.JobConf
 [findbugs]   org.apache.hadoop.util.bloom.Key
 [findbugs]   org.apache.hadoop.io.Text
 [findbugs]   org.antlr.runtime.NoViableAltException
 [findbugs]   org.apache.hadoop.util.GenericOptionsParser
 [findbugs]   org.apache.hadoop.mapreduce.JobID
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptID
 [findbugs]   org.apache.hadoop.filecache.DistributedCache
 [findbugs]   org.apache.hadoop.fs.FSDataOutputStream
 [findbugs]   org.python.core.PyList
 [findbugs]   org.antlr.runtime.tree.TreeNodeStream
 [findbugs]   org.apache.hadoop.hbase.filter.BinaryComparator
 [findbugs]   dk.brics.automaton.RegExp
 [findbugs]   org.mozilla.javascript.Scriptable
 [findbugs]   org.mozilla.javascript.EcmaError
 [findbugs]   org.apache.hadoop.io.serializer.Serializer
 [findbugs]   org.apache.hadoop.util.bloom.Filter
 [findbugs]   org.python.core.PyNone
 [findbugs]   org.mozilla.javascript.Function
 [findbugs]   org.python.core.PySystemState
 [findbugs]   org.antlr.runtime.RecognizerSharedState
 [findbugs]   org.codehaus.jackson.JsonFactory
 [findbugs]   org.antlr.runtime.EarlyExitException
 [findbugs]   org.apache.hadoop.hdfs.DistributedFileSystem
 [findbugs]   org.apache.hadoop.util.LineReader
 [findbugs] Warnings generated: 25
 [findbugs] Missing classes: 233
 [findbugs] Calculating exit code...
 [findbugs] Setting 'missing class' flag (2)
 [findbugs] Setting 'bugs found' flag (1)
 [findbugs] Exit code set to: 3
 [findbugs] Java Result: 3
 [findbugs] Classes needed for analysis were missing
 [findbugs] Output saved to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml>
     [xslt] Processing <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml> to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.html>
     [xslt] Loading stylesheet /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl

BUILD SUCCESSFUL
Total time: 9 minutes 43 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================




======================================================================
======================================================================
CLEAN: cleaning workspace
======================================================================
======================================================================


Buildfile: build.xml

clean:
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src-gen>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src/docs/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/test/org/apache/pig/test/utils/dotGraph/parser>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig.jar>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig-withouthadoop.jar>

clean:

clean:

BUILD SUCCESSFUL
Total time: 0 seconds


======================================================================
======================================================================
ANALYSIS: ant -Drun.clover=true -Dclover.home=/homes/hudson/tools/clover/latest clover test-commit generate-clover-reports -Dtest.junit.output.format=xml -Dtest.output=yes -Dversion=${BUILD_ID} -Dfindbugs.home=$FINDBUGS_HOME -Djava5.home=$JAVA5_HOME -Dforrest.home=$FORREST_HOME -Dclover.home=$CLOVER_HOME -Declipse.home=$ECLIPSE_HOME
======================================================================
======================================================================


Buildfile: build.xml

clover.setup:
    [mkdir] Created dir: <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/clover/db>
[clover-setup] Clover Version 3.1.0, built on May 31 2011 (build-821)
[clover-setup] Loaded from: /home/jenkins/tools/clover/latest/lib/clover.jar

BUILD FAILED
java.lang.RuntimeException: Clover upgrades for your license ended December 14 2010, and this version of Clover was built May 31 2011. Please visit http://www.atlassian.com/clover/renew for information on upgrading your license.
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:103)
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:25)
	at com.cenqua.clover.tasks.AbstractCloverTask.execute(AbstractCloverTask.java:52)
	at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:288)
	at sun.reflect.GeneratedMethodAccessor1.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
	at org.apache.tools.ant.Task.perform(Task.java:348)
	at org.apache.tools.ant.Target.execute(Target.java:357)
	at org.apache.tools.ant.Target.performTasks(Target.java:385)
	at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1337)
	at org.apache.tools.ant.Project.executeTarget(Project.java:1306)
	at org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
	at org.apache.tools.ant.Project.executeTargets(Project.java:1189)
	at org.apache.tools.ant.Main.runBuild(Main.java:758)
	at org.apache.tools.ant.Main.startAnt(Main.java:217)
	at org.apache.tools.ant.launch.Launcher.run(Launcher.java:257)
	at org.apache.tools.ant.launch.Launcher.main(Launcher.java:104)

Total time: 1 second
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1214

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1214/changes>

Changes:

[daijy] PIG-2550: Custom tuple results in 'Unexpected datatype 110 while reading tuple from binary file' while spilling

------------------------------------------
[...truncated 52722 lines...]
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/24 10:36:20 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 2
    [junit] 12/03/24 10:36:20 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/24 10:36:20 INFO ipc.Server: Stopping server on 41818
    [junit] 12/03/24 10:36:20 INFO ipc.Server: IPC Server handler 0 on 41818: exiting
    [junit] 12/03/24 10:36:20 INFO ipc.Server: IPC Server handler 1 on 41818: exiting
    [junit] 12/03/24 10:36:20 INFO ipc.Server: IPC Server handler 2 on 41818: exiting
    [junit] 12/03/24 10:36:20 INFO ipc.Server: Stopping IPC Server listener on 41818
    [junit] 12/03/24 10:36:20 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/24 10:36:20 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/24 10:36:20 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/24 10:36:20 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:50786, storageID=DS-341775186-67.195.138.20-50786-1332584847527, infoPort=47189, ipcPort=41818):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/24 10:36:20 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/24 10:36:20 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/24 10:36:20 INFO datanode.DataNode: Scheduling block blk_5735442229728291317_1102 file build/test/data/dfs/data/data1/current/blk_5735442229728291317 for deletion
    [junit] 12/03/24 10:36:20 INFO datanode.DataNode: Deleted block blk_5735442229728291317_1102 at file build/test/data/dfs/data/data1/current/blk_5735442229728291317
    [junit] 12/03/24 10:36:21 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/24 10:36:21 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:50786, storageID=DS-341775186-67.195.138.20-50786-1332584847527, infoPort=47189, ipcPort=41818):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/24 10:36:21 INFO ipc.Server: Stopping server on 41818
    [junit] 12/03/24 10:36:21 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/24 10:36:21 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/24 10:36:21 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/24 10:36:21 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/24 10:36:21 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-644488429
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-644488429
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/24 10:36:21 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 1
    [junit] 12/03/24 10:36:21 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/24 10:36:21 INFO ipc.Server: Stopping server on 35517
    [junit] 12/03/24 10:36:21 INFO ipc.Server: IPC Server handler 0 on 35517: exiting
    [junit] 12/03/24 10:36:21 INFO ipc.Server: IPC Server handler 1 on 35517: exiting
    [junit] 12/03/24 10:36:21 INFO ipc.Server: IPC Server handler 2 on 35517: exiting
    [junit] 12/03/24 10:36:21 INFO ipc.Server: Stopping IPC Server listener on 35517
    [junit] 12/03/24 10:36:21 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/24 10:36:21 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/24 10:36:21 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/24 10:36:21 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:34132, storageID=DS-1176409081-67.195.138.20-34132-1332584847144, infoPort=35348, ipcPort=35517):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/24 10:36:21 INFO datanode.DataNode: Exiting DataXceiveServer
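
The AsynchronousCloseException in the DataXceiveServer trace above (and in the later DataNode shutdowns) is the normal NIO signal that another thread closed the listening channel while accept() was blocked; the "Exiting DataXceiveServer" line right after shows the DataNode's shutdown path doing exactly that, so the WARN is ordinary teardown noise. A minimal standalone sketch of that channel behavior, assuming a current JDK (plain NIO, not Hadoop code; the class name is invented for illustration):

    import java.net.InetSocketAddress;
    import java.nio.channels.AsynchronousCloseException;
    import java.nio.channels.ServerSocketChannel;

    public class AsyncCloseSketch {
        public static void main(String[] args) throws Exception {
            ServerSocketChannel server = ServerSocketChannel.open();
            server.bind(new InetSocketAddress("127.0.0.1", 0));
            Thread acceptor = new Thread(() -> {
                try {
                    server.accept();                 // blocks, like DataXceiverServer.run()
                } catch (AsynchronousCloseException e) {
                    // the exception logged above when shutdown closes the channel
                    System.out.println("accept() unblocked by close: " + e);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            });
            acceptor.start();
            Thread.sleep(200);                       // give the acceptor time to block in accept()
            server.close();                          // the shutdown path closes the listening channel
            acceptor.join();
        }
    }
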
    [junit] 12/03/24 10:36:22 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/24 10:36:22 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/24 10:36:22 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:34132, storageID=DS-1176409081-67.195.138.20-34132-1332584847144, infoPort=35348, ipcPort=35517):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/24 10:36:22 INFO ipc.Server: Stopping server on 35517
    [junit] 12/03/24 10:36:22 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/24 10:36:22 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/24 10:36:22 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/24 10:36:22 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/24 10:36:22 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1960655114
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1960655114
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/24 10:36:22 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 0
    [junit] 12/03/24 10:36:22 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/24 10:36:22 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:34132 to delete  blk_-3841136412746938090_1102
    [junit] 12/03/24 10:36:22 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:57111 to delete  blk_-3841136412746938090_1102 blk_-8249542005391452851_1101 blk_-5743994056258678942_1095
    [junit] 12/03/24 10:36:22 INFO ipc.Server: Stopping server on 55622
    [junit] 12/03/24 10:36:22 INFO ipc.Server: IPC Server handler 0 on 55622: exiting
    [junit] 12/03/24 10:36:22 INFO ipc.Server: IPC Server handler 1 on 55622: exiting
    [junit] 12/03/24 10:36:22 INFO ipc.Server: Stopping IPC Server listener on 55622
    [junit] 12/03/24 10:36:22 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/24 10:36:22 INFO ipc.Server: IPC Server handler 2 on 55622: exiting
    [junit] 12/03/24 10:36:22 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/24 10:36:22 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/24 10:36:22 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:57111, storageID=DS-316389568-67.195.138.20-57111-1332584846745, infoPort=56108, ipcPort=55622):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/24 10:36:22 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/24 10:36:22 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/24 10:36:23 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/24 10:36:23 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:57111, storageID=DS-316389568-67.195.138.20-57111-1332584846745, infoPort=56108, ipcPort=55622):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/24 10:36:23 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/24 10:36:23 INFO ipc.Server: Stopping server on 55622
    [junit] 12/03/24 10:36:23 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/24 10:36:23 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/24 10:36:23 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/24 10:36:23 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/24 10:36:23 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1638739691
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1638739691
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/24 10:36:23 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/24 10:36:23 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/24 10:36:23 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 12 Number of transactions batched in Syncs: 161 Number of syncs: 348 SyncTimes(ms): 4314 403
    [junit] 12/03/24 10:36:23 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException. java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/24 10:36:23 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/24 10:36:23 INFO ipc.Server: Stopping server on 43888
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 0 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/24 10:36:23 INFO ipc.Server: Stopping IPC Server listener on 43888
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 6 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 3 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 2 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 7 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 8 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 5 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 1 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 4 on 43888: exiting
    [junit] 12/03/24 10:36:23 INFO ipc.Server: IPC Server handler 9 on 43888: exiting
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 530.44 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
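
The repeated WARN util.MBeans / javax.management.InstanceNotFoundException traces during the DataNode shutdowns above mean unregisterMBean was called for a name that is not (or is no longer) registered with the platform MBeanServer; the log continues past each trace, so MBeans.unregister catches and logs the exception and it surfaces only as a warning here. A minimal standalone sketch of that JMX behavior, assuming only the standard javax.management API (not Hadoop code; the bean name and classes are invented for illustration):

    import java.lang.management.ManagementFactory;
    import javax.management.InstanceNotFoundException;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class UnregisterTwiceSketch {
        public interface DemoMBean { int getValue(); }
        public static class Demo implements DemoMBean {
            public int getValue() { return 42; }
        }

        public static void main(String[] args) throws Exception {
            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
            ObjectName name = new ObjectName("Sketch:service=DataNode,name=FSDatasetStateSketch");
            server.registerMBean(new Demo(), name);
            server.unregisterMBean(name);        // first unregister succeeds
            try {
                // second attempt: the name is already gone, so the server throws the
                // InstanceNotFoundException seen in the WARN traces above
                server.unregisterMBean(name);
            } catch (InstanceNotFoundException e) {
                System.out.println("would be logged as WARN util.MBeans: " + e);
            }
        }
    }
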
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/24 10:36:24 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/24 10:36:24 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/24 10:36:24 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/24 10:36:24 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/24 10:36:24 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/24 10:36:24 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/24 10:36:24 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.105 sec
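
The TestStringUDFs warnings above are expected negative-path cases rather than problems (the suite reports 11 tests, 0 failures, 0 errors). The negative offsets in the messages (-2, -1, -8) are what java.lang.String.substring reports on older JDKs like the one in this log when the end index precedes the start index or an index is negative, which is presumably what these SUBSTRING tests feed it. A minimal plain-Java sketch of those messages (not Pig's SUBSTRING implementation):

    public class SubstringRangeSketch {
        public static void main(String[] args) {
            try {
                // end index precedes start index; on the 1.6-era JDK in this log the
                // message is "String index out of range: -2" (the difference end - start)
                "hello".substring(3, 1);
            } catch (StringIndexOutOfBoundsException e) {
                System.out.println(e.getMessage());
            }
            try {
                // negative start index; older JDKs report "String index out of range: -1"
                // (newer JDKs word these messages differently)
                "hello".substring(-1);
            } catch (StringIndexOutOfBoundsException e) {
                System.out.println(e.getMessage());
            }
        }
    }
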
   [delete] Deleting directory /tmp/pig_junit_tmp1526002099

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:781: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:836: Tests failed!

Total time: 23 minutes 41 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1213

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1213/changes>

Changes:

[daijy] PIG-2442: Multiple Stores in pig streaming causes infinite waiting

[daijy] PIG-2442: Multiple Stores in pig streaming causes infinite waiting

[daijy] PIG-1270: Push limit into loader

[daijy] PIG-2589: missing TestBoolean.java

[daijy] PIG-2609: e2e harness: make hdfs base path configurable (outside default.conf)

------------------------------------------
[...truncated 52753 lines...]
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/23 10:36:42 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1666231610
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1666231610
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] Shutting down DataNode 2
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/23 10:36:42 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/23 10:36:42 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping server on 49707
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 0 on 49707: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 2 on 49707: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 1 on 49707: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/23 10:36:42 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping IPC Server listener on 49707
    [junit] 12/03/23 10:36:42 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:51845, storageID=DS-253180652-67.195.138.20-51845-1332498470010, infoPort=41614, ipcPort=49707):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/23 10:36:42 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:51845, storageID=DS-253180652-67.195.138.20-51845-1332498470010, infoPort=41614, ipcPort=49707):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping server on 49707
    [junit] 12/03/23 10:36:42 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/23 10:36:42 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/23 10:36:42 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/23 10:36:42 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1448654454
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1448654454
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] Shutting down DataNode 1
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/23 10:36:42 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/23 10:36:42 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping server on 56351
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 2 on 56351: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping IPC Server listener on 56351
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/23 10:36:42 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 1 on 56351: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 0 on 56351: exiting
    [junit] 12/03/23 10:36:42 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:35690, storageID=DS-692503111-67.195.138.20-35690-1332498469661, infoPort=33131, ipcPort=56351):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/23 10:36:42 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:35690, storageID=DS-692503111-67.195.138.20-35690-1332498469661, infoPort=33131, ipcPort=56351):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping server on 56351
    [junit] 12/03/23 10:36:42 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/23 10:36:42 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/23 10:36:42 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/23 10:36:42 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId723625249
    [junit] Shutting down DataNode 0
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId723625249
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/23 10:36:42 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/23 10:36:42 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping server on 34202
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 0 on 34202: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/23 10:36:42 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping IPC Server listener on 34202
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 1 on 34202: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 2 on 34202: exiting
    [junit] 12/03/23 10:36:42 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:39763, storageID=DS-810296157-67.195.138.20-39763-1332498469278, infoPort=46240, ipcPort=34202):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/23 10:36:42 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:39763, storageID=DS-810296157-67.195.138.20-39763-1332498469278, infoPort=46240, ipcPort=34202):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/23 10:36:42 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping server on 34202
    [junit] 12/03/23 10:36:42 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/23 10:36:42 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/23 10:36:42 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/23 10:36:42 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/23 10:36:42 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId104293769
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId104293769
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/23 10:36:42 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/23 10:36:42 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/23 10:36:42 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException. java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/23 10:36:42 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/23 10:36:42 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 9 Number of transactions batched in Syncs: 150 Number of syncs: 348 SyncTimes(ms): 6114 206
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping server on 49636
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 0 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 1 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 2 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 3 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 4 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping IPC Server listener on 49636
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 8 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 5 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 7 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 6 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: IPC Server handler 9 on 49636: exiting
    [junit] 12/03/23 10:36:42 INFO ipc.Server: Stopping IPC Server Responder
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 526.817 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/23 10:36:43 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/23 10:36:43 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/23 10:36:43 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/23 10:36:43 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/23 10:36:43 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/23 10:36:43 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/23 10:36:43 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.117 sec
   [delete] Deleting directory /tmp/pig_junit_tmp1523870312

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:781: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:836: Tests failed!

Total time: 23 minutes 58 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1212

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1212/changes>

Changes:

[daijy] PIG-2608: Typo in PigStorage documentation for source tagging

[daijy] PIG-2505: missing test_no_exception

------------------------------------------
[...truncated 51912 lines...]
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/21 10:35:29 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 2
    [junit] 12/03/21 10:35:29 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/21 10:35:29 INFO ipc.Server: Stopping server on 58134
    [junit] 12/03/21 10:35:29 INFO ipc.Server: Stopping IPC Server listener on 58134
    [junit] 12/03/21 10:35:29 INFO ipc.Server: IPC Server handler 2 on 58134: exiting
    [junit] 12/03/21 10:35:29 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/21 10:35:29 INFO ipc.Server: IPC Server handler 1 on 58134: exiting
    [junit] 12/03/21 10:35:29 INFO ipc.Server: IPC Server handler 0 on 58134: exiting
    [junit] 12/03/21 10:35:29 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/21 10:35:29 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/21 10:35:29 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:33760, storageID=DS-1101057205-67.195.138.20-33760-1332325586734, infoPort=52166, ipcPort=58134):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/21 10:35:29 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/21 10:35:29 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:33760, storageID=DS-1101057205-67.195.138.20-33760-1332325586734, infoPort=52166, ipcPort=58134):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/21 10:35:29 INFO ipc.Server: Stopping server on 58134
    [junit] 12/03/21 10:35:29 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/21 10:35:29 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/21 10:35:29 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/21 10:35:29 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/21 10:35:29 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/21 10:35:30 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:60141 to delete  blk_-7192402847186639452_1095 blk_-1109841277642312362_1101
    [junit] 12/03/21 10:35:30 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:38982 to delete  blk_-422404991587375213_1102 blk_-1109841277642312362_1101 blk_-6062460517911994428_1102
    [junit] 12/03/21 10:35:30 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/21 10:35:30 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1816741885
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1816741885
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/21 10:35:30 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 1
    [junit] 12/03/21 10:35:30 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/21 10:35:31 INFO ipc.Server: Stopping server on 60494
    [junit] 12/03/21 10:35:31 INFO ipc.Server: IPC Server handler 0 on 60494: exiting
    [junit] 12/03/21 10:35:31 INFO ipc.Server: IPC Server handler 1 on 60494: exiting
    [junit] 12/03/21 10:35:31 INFO ipc.Server: IPC Server handler 2 on 60494: exiting
    [junit] 12/03/21 10:35:31 INFO ipc.Server: Stopping IPC Server listener on 60494
    [junit] 12/03/21 10:35:31 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/21 10:35:31 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/21 10:35:31 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/21 10:35:31 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:60141, storageID=DS-1468001538-67.195.138.20-60141-1332325586376, infoPort=37555, ipcPort=60494):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/21 10:35:31 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/21 10:35:31 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/21 10:35:32 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/21 10:35:32 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:60141, storageID=DS-1468001538-67.195.138.20-60141-1332325586376, infoPort=37555, ipcPort=60494):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/21 10:35:32 INFO ipc.Server: Stopping server on 60494
    [junit] 12/03/21 10:35:32 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/21 10:35:32 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/21 10:35:32 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/21 10:35:32 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/21 10:35:32 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-770761283
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-770761283
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] Shutting down DataNode 0
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/21 10:35:32 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/21 10:35:32 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/21 10:35:32 INFO ipc.Server: Stopping server on 37132
    [junit] 12/03/21 10:35:32 INFO ipc.Server: IPC Server handler 1 on 37132: exiting
    [junit] 12/03/21 10:35:32 INFO ipc.Server: IPC Server handler 0 on 37132: exiting
    [junit] 12/03/21 10:35:32 INFO ipc.Server: Stopping IPC Server listener on 37132
    [junit] 12/03/21 10:35:32 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/21 10:35:32 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/21 10:35:32 INFO ipc.Server: IPC Server handler 2 on 37132: exiting
    [junit] 12/03/21 10:35:32 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:40590, storageID=DS-1567455724-67.195.138.20-40590-1332325586005, infoPort=52252, ipcPort=37132):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/21 10:35:32 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/21 10:35:32 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/21 10:35:32 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:40590, storageID=DS-1567455724-67.195.138.20-40590-1332325586005, infoPort=52252, ipcPort=37132):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/21 10:35:32 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/21 10:35:32 INFO ipc.Server: Stopping server on 37132
    [junit] 12/03/21 10:35:32 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/21 10:35:32 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/21 10:35:32 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/21 10:35:32 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/21 10:35:32 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/21 10:35:33 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/21 10:35:33 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1993685444
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1993685444
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/21 10:35:33 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/21 10:35:33 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/21 10:35:33 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException.java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/21 10:35:33 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 12 Number of transactions batched in Syncs: 157 Number of syncs: 348 SyncTimes(ms): 4055 247 
    [junit] 12/03/21 10:35:33 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/21 10:35:33 INFO ipc.Server: Stopping server on 33861
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 1 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 0 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 2 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 3 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 4 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 5 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 6 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 7 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 8 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: IPC Server handler 9 on 33861: exiting
    [junit] 12/03/21 10:35:33 INFO ipc.Server: Stopping IPC Server listener on 33861
    [junit] 12/03/21 10:35:33 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/21 10:35:33 INFO ipc.Server: Stopping IPC Server Responder
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 540.696 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/21 10:35:34 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/21 10:35:34 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/21 10:35:34 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/21 10:35:34 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/21 10:35:34 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/21 10:35:34 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/21 10:35:34 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.107 sec
   [delete] Deleting directory /tmp/pig_junit_tmp1816288462

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:781: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:836: Tests failed!

Total time: 24 minutes 0 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1211

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1211/changes>

Changes:

[daijy] PIG-2505: AvroStorage won't read any file not ending in .avro

[dvryaboy] PIG-2604: Pig should print its build info at runtime

[daijy] Fix Unit test TestMultiQueryBasic, TestSampleOptimizer caused by PIG-2573

[daijy] Fix Unit test failure TestPigServer

------------------------------------------
[...truncated 6633 lines...]
 [findbugs]   org.mozilla.javascript.NativeJavaObject
 [findbugs]   jline.ConsoleReaderInputStream
 [findbugs]   org.apache.log4j.PropertyConfigurator
 [findbugs]   org.apache.hadoop.mapred.TaskID
 [findbugs]   org.apache.commons.cli.CommandLine
 [findbugs]   org.python.core.Py
 [findbugs]   org.apache.hadoop.io.BooleanWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable
 [findbugs]   org.antlr.runtime.BitSet
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.Job
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter$CompareOp
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader
 [findbugs]   org.mozilla.javascript.NativeFunction
 [findbugs]   org.apache.hadoop.mapreduce.Counter
 [findbugs]   org.codehaus.jackson.JsonEncoding
 [findbugs]   org.codehaus.jackson.JsonParseException
 [findbugs]   org.python.core.PyCode
 [findbugs]   com.jcraft.jsch.HostKey
 [findbugs]   org.apache.hadoop.hbase.filter.Filter
 [findbugs]   org.apache.commons.logging.Log
 [findbugs]   com.google.common.util.concurrent.ListenableFuture
 [findbugs]   org.apache.hadoop.util.RunJar
 [findbugs]   org.apache.hadoop.mapred.Counters$Group
 [findbugs]   com.jcraft.jsch.ChannelExec
 [findbugs]   org.apache.hadoop.hbase.util.Base64
 [findbugs]   org.antlr.runtime.TokenStream
 [findbugs]   org.apache.hadoop.io.IOUtils
 [findbugs]   com.google.common.util.concurrent.CheckedFuture
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader$Scanner$Entry
 [findbugs]   org.apache.hadoop.fs.FSDataInputStream
 [findbugs]   org.python.core.PyObject
 [findbugs]   jline.History
 [findbugs]   org.apache.hadoop.io.BooleanWritable
 [findbugs]   org.apache.log4j.Logger
 [findbugs]   org.apache.hadoop.hbase.filter.FamilyFilter
 [findbugs]   org.antlr.runtime.IntStream
 [findbugs]   org.apache.hadoop.util.ReflectionUtils
 [findbugs]   org.apache.hadoop.fs.ContentSummary
 [findbugs]   org.python.core.PyTuple
 [findbugs]   org.apache.hadoop.conf.Configuration
 [findbugs]   com.google.common.base.Joiner
 [findbugs]   org.apache.hadoop.mapreduce.lib.input.FileSplit
 [findbugs]   org.apache.hadoop.mapred.Counters$Counter
 [findbugs]   com.jcraft.jsch.Channel
 [findbugs]   org.apache.hadoop.mapred.JobPriority
 [findbugs]   org.apache.commons.cli.Options
 [findbugs]   org.apache.hadoop.mapred.JobID
 [findbugs]   org.apache.hadoop.util.bloom.BloomFilter
 [findbugs]   org.python.core.PyFrame
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter
 [findbugs]   org.apache.hadoop.util.VersionInfo
 [findbugs]   org.python.core.PyString
 [findbugs]   org.apache.hadoop.io.Text$Comparator
 [findbugs]   org.antlr.runtime.MismatchedSetException
 [findbugs]   org.apache.hadoop.io.BytesWritable
 [findbugs]   org.apache.hadoop.fs.FsShell
 [findbugs]   org.mozilla.javascript.ImporterTopLevel
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableOutputFormat
 [findbugs]   org.apache.hadoop.mapred.TaskReport
 [findbugs]   org.antlr.runtime.tree.RewriteRuleSubtreeStream
 [findbugs]   org.apache.commons.cli.HelpFormatter
 [findbugs]   org.mozilla.javascript.NativeObject
 [findbugs]   org.apache.hadoop.hbase.HConstants
 [findbugs]   org.apache.hadoop.io.serializer.Deserializer
 [findbugs]   org.antlr.runtime.FailedPredicateException
 [findbugs]   org.apache.hadoop.io.compress.CompressionCodec
 [findbugs]   org.apache.hadoop.fs.FileStatus
 [findbugs]   org.apache.hadoop.hbase.client.Result
 [findbugs]   org.apache.hadoop.mapreduce.JobContext
 [findbugs]   org.codehaus.jackson.JsonGenerator
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptContext
 [findbugs]   org.apache.hadoop.io.BytesWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable$Comparator
 [findbugs]   org.codehaus.jackson.map.util.LRUMap
 [findbugs]   org.apache.hadoop.hbase.util.Bytes
 [findbugs]   org.antlr.runtime.MismatchedTokenException
 [findbugs]   org.codehaus.jackson.JsonParser
 [findbugs]   com.jcraft.jsch.UserInfo
 [findbugs]   org.python.core.PyException
 [findbugs]   org.apache.commons.cli.ParseException
 [findbugs]   org.apache.hadoop.io.compress.CompressionOutputStream
 [findbugs]   org.apache.hadoop.hbase.filter.WritableByteArrayComparable
 [findbugs]   org.antlr.runtime.tree.CommonTreeNodeStream
 [findbugs]   org.apache.log4j.Level
 [findbugs]   org.apache.hadoop.hbase.client.Scan
 [findbugs]   org.apache.hadoop.mapreduce.Job
 [findbugs]   com.google.common.util.concurrent.Futures
 [findbugs]   org.apache.commons.logging.LogFactory
 [findbugs]   org.apache.commons.codec.binary.Base64
 [findbugs]   org.codehaus.jackson.map.ObjectMapper
 [findbugs]   org.apache.hadoop.fs.FileSystem
 [findbugs]   org.apache.hadoop.hbase.filter.FilterList$Operator
 [findbugs]   org.apache.hadoop.hbase.io.ImmutableBytesWritable
 [findbugs]   org.apache.hadoop.io.serializer.SerializationFactory
 [findbugs]   org.antlr.runtime.tree.TreeAdaptor
 [findbugs]   org.apache.hadoop.mapred.RunningJob
 [findbugs]   org.antlr.runtime.CommonTokenStream
 [findbugs]   org.apache.hadoop.io.DataInputBuffer
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile
 [findbugs]   org.apache.commons.cli.GnuParser
 [findbugs]   org.mozilla.javascript.Context
 [findbugs]   org.apache.hadoop.io.FloatWritable
 [findbugs]   org.antlr.runtime.tree.RewriteEarlyExitException
 [findbugs]   org.apache.hadoop.hbase.HBaseConfiguration
 [findbugs]   org.codehaus.jackson.JsonGenerationException
 [findbugs]   org.apache.hadoop.mapreduce.TaskInputOutputContext
 [findbugs]   org.apache.hadoop.io.compress.GzipCodec
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.JobControl
 [findbugs]   org.antlr.runtime.BaseRecognizer
 [findbugs]   org.apache.hadoop.fs.FileUtil
 [findbugs]   org.apache.hadoop.fs.Path
 [findbugs]   org.apache.hadoop.hbase.client.Put
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Writer
 [findbugs]   jline.ConsoleReader
 [findbugs]   com.google.common.collect.Lists
 [findbugs]   org.apache.hadoop.mapreduce.MapContext
 [findbugs]   org.python.core.PyJavaPackage
 [findbugs]   org.apache.hadoop.hbase.filter.ColumnPrefixFilter
 [findbugs]   org.python.core.PyStringMap
 [findbugs]   org.apache.hadoop.mapreduce.TaskID
 [findbugs]   org.apache.hadoop.hbase.client.HTable
 [findbugs]   org.apache.hadoop.io.FloatWritable$Comparator
 [findbugs]   org.apache.zookeeper.ZooKeeper
 [findbugs]   org.codehaus.jackson.map.JsonMappingException
 [findbugs]   org.python.core.PyFunction
 [findbugs]   org.antlr.runtime.TokenSource
 [findbugs]   com.jcraft.jsch.ChannelDirectTCPIP
 [findbugs]   com.jcraft.jsch.JSchException
 [findbugs]   org.python.util.PythonInterpreter
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
 [findbugs]   org.python.core.PyInteger
 [findbugs]   org.apache.hadoop.mapred.JobConf
 [findbugs]   org.apache.hadoop.util.bloom.Key
 [findbugs]   org.apache.hadoop.io.Text
 [findbugs]   org.antlr.runtime.NoViableAltException
 [findbugs]   org.apache.hadoop.util.GenericOptionsParser
 [findbugs]   org.apache.hadoop.mapreduce.JobID
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptID
 [findbugs]   org.apache.hadoop.filecache.DistributedCache
 [findbugs]   org.apache.hadoop.fs.FSDataOutputStream
 [findbugs]   org.python.core.PyList
 [findbugs]   org.antlr.runtime.tree.TreeNodeStream
 [findbugs]   org.apache.hadoop.hbase.filter.BinaryComparator
 [findbugs]   dk.brics.automaton.RegExp
 [findbugs]   org.mozilla.javascript.Scriptable
 [findbugs]   org.mozilla.javascript.EcmaError
 [findbugs]   org.apache.hadoop.io.serializer.Serializer
 [findbugs]   org.apache.hadoop.util.bloom.Filter
 [findbugs]   org.python.core.PyNone
 [findbugs]   org.mozilla.javascript.Function
 [findbugs]   org.python.core.PySystemState
 [findbugs]   org.antlr.runtime.RecognizerSharedState
 [findbugs]   org.codehaus.jackson.JsonFactory
 [findbugs]   org.antlr.runtime.EarlyExitException
 [findbugs]   org.apache.hadoop.hdfs.DistributedFileSystem
 [findbugs]   org.apache.hadoop.util.LineReader
 [findbugs] Warnings generated: 25
 [findbugs] Missing classes: 233
 [findbugs] Calculating exit code...
 [findbugs] Setting 'missing class' flag (2)
 [findbugs] Setting 'bugs found' flag (1)
 [findbugs] Exit code set to: 3
 [findbugs] Java Result: 3
 [findbugs] Classes needed for analysis were missing
 [findbugs] Output saved to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml>
     [xslt] Processing <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml> to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.html>
     [xslt] Loading stylesheet /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl

BUILD SUCCESSFUL
Total time: 12 minutes 7 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================




======================================================================
======================================================================
CLEAN: cleaning workspace
======================================================================
======================================================================


Buildfile: build.xml

clean:
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src-gen>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src/docs/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/test/org/apache/pig/test/utils/dotGraph/parser>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig.jar>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig-withouthadoop.jar>

clean:

clean:

BUILD SUCCESSFUL
Total time: 0 seconds


======================================================================
======================================================================
ANALYSIS: ant -Drun.clover=true -Dclover.home=/homes/hudson/tools/clover/latest clover test-commit generate-clover-reports -Dtest.junit.output.format=xml -Dtest.output=yes -Dversion=${BUILD_ID} -Dfindbugs.home=$FINDBUGS_HOME -Djava5.home=$JAVA5_HOME -Dforrest.home=$FORREST_HOME -Dclover.home=$CLOVER_HOME -Declipse.home=$ECLIPSE_HOME
======================================================================
======================================================================


Buildfile: build.xml

clover.setup:
    [mkdir] Created dir: <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/clover/db>
[clover-setup] Clover Version 3.1.0, built on May 31 2011 (build-821)
[clover-setup] Loaded from: /home/jenkins/tools/clover/latest/lib/clover.jar

BUILD FAILED
java.lang.RuntimeException: Clover upgrades for your license ended December 14 2010, and this version of Clover was built May 31 2011. Please visit http://www.atlassian.com/clover/renew for information on upgrading your license.
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:103)
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:25)
	at com.cenqua.clover.tasks.AbstractCloverTask.execute(AbstractCloverTask.java:52)
	at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:288)
	at sun.reflect.GeneratedMethodAccessor1.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
	at org.apache.tools.ant.Task.perform(Task.java:348)
	at org.apache.tools.ant.Target.execute(Target.java:357)
	at org.apache.tools.ant.Target.performTasks(Target.java:385)
	at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1337)
	at org.apache.tools.ant.Project.executeTarget(Project.java:1306)
	at org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
	at org.apache.tools.ant.Project.executeTargets(Project.java:1189)
	at org.apache.tools.ant.Main.runBuild(Main.java:758)
	at org.apache.tools.ant.Main.startAnt(Main.java:217)
	at org.apache.tools.ant.launch.Launcher.run(Launcher.java:257)
	at org.apache.tools.ant.launch.Launcher.main(Launcher.java:104)

Total time: 1 second
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1210

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1210/changes>

Changes:

[daijy] Fix test failure in cmdline.conf

------------------------------------------
[...truncated 50268 lines...]
    [junit] Shutting down DataNode 2
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/19 22:35:16 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/19 22:35:16 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/19 22:35:16 INFO ipc.Server: Stopping server on 39126
    [junit] 12/03/19 22:35:16 INFO ipc.Server: IPC Server handler 0 on 39126: exiting
    [junit] 12/03/19 22:35:16 INFO ipc.Server: IPC Server handler 1 on 39126: exiting
    [junit] 12/03/19 22:35:16 INFO ipc.Server: IPC Server handler 2 on 39126: exiting
    [junit] 12/03/19 22:35:16 INFO ipc.Server: Stopping IPC Server listener on 39126
    [junit] 12/03/19 22:35:16 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/19 22:35:16 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/19 22:35:16 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/19 22:35:16 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:60327, storageID=DS-1464507831-67.195.138.20-60327-1332195977739, infoPort=42748, ipcPort=39126):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/19 22:35:16 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/19 22:35:16 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:41343 to delete  blk_2825252729486616484_1102 blk_2120920925068208348_1101 blk_-4467477274313762780_1095
    [junit] 12/03/19 22:35:16 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:60327 to delete  blk_2120920925068208348_1101 blk_-4467477274313762780_1095
    [junit] 12/03/19 22:35:16 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:60327, storageID=DS-1464507831-67.195.138.20-60327-1332195977739, infoPort=42748, ipcPort=39126):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/19 22:35:17 INFO ipc.Server: Stopping server on 39126
    [junit] 12/03/19 22:35:17 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/19 22:35:17 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/19 22:35:17 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/19 22:35:17 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-944817812
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-944817812
    [junit] Shutting down DataNode 1
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/19 22:35:17 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/19 22:35:17 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Scheduling block blk_-4467477274313762780_1095 file build/test/data/dfs/data/data2/current/blk_-4467477274313762780 for deletion
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Scheduling block blk_2120920925068208348_1101 file build/test/data/dfs/data/data1/current/blk_2120920925068208348 for deletion
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Scheduling block blk_2825252729486616484_1102 file build/test/data/dfs/data/data2/current/blk_2825252729486616484 for deletion
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Deleted block blk_-4467477274313762780_1095 at file build/test/data/dfs/data/data2/current/blk_-4467477274313762780
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Deleted block blk_2825252729486616484_1102 at file build/test/data/dfs/data/data2/current/blk_2825252729486616484
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Deleted block blk_2120920925068208348_1101 at file build/test/data/dfs/data/data1/current/blk_2120920925068208348
    [junit] 12/03/19 22:35:17 INFO ipc.Server: Stopping server on 57038
    [junit] 12/03/19 22:35:17 INFO ipc.Server: IPC Server handler 0 on 57038: exiting
    [junit] 12/03/19 22:35:17 INFO ipc.Server: Stopping IPC Server listener on 57038
    [junit] 12/03/19 22:35:17 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/19 22:35:17 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/19 22:35:17 INFO ipc.Server: IPC Server handler 2 on 57038: exiting
    [junit] 12/03/19 22:35:17 INFO ipc.Server: IPC Server handler 1 on 57038: exiting
    [junit] 12/03/19 22:35:17 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:43134, storageID=DS-1069945764-67.195.138.20-43134-1332195977389, infoPort=33651, ipcPort=57038):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/19 22:35:17 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:43134, storageID=DS-1069945764-67.195.138.20-43134-1332195977389, infoPort=33651, ipcPort=57038):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/19 22:35:17 INFO ipc.Server: Stopping server on 57038
    [junit] 12/03/19 22:35:17 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/19 22:35:17 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/19 22:35:17 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/19 22:35:17 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-186520548
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-186520548
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/19 22:35:17 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 0
    [junit] 12/03/19 22:35:17 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/19 22:35:17 INFO ipc.Server: Stopping server on 34286
    [junit] 12/03/19 22:35:17 INFO ipc.Server: IPC Server handler 2 on 34286: exiting
    [junit] 12/03/19 22:35:17 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/19 22:35:17 INFO ipc.Server: Stopping IPC Server listener on 34286
    [junit] 12/03/19 22:35:17 INFO ipc.Server: IPC Server handler 1 on 34286: exiting
    [junit] 12/03/19 22:35:17 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/19 22:35:17 INFO ipc.Server: IPC Server handler 0 on 34286: exiting
    [junit] 12/03/19 22:35:17 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:41343, storageID=DS-518829668-67.195.138.20-41343-1332195976992, infoPort=60370, ipcPort=34286):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/19 22:35:17 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/19 22:35:18 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/19 22:35:18 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/19 22:35:18 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:41343, storageID=DS-518829668-67.195.138.20-41343-1332195976992, infoPort=60370, ipcPort=34286):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/19 22:35:18 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/19 22:35:18 INFO ipc.Server: Stopping server on 34286
    [junit] 12/03/19 22:35:18 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/19 22:35:18 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/19 22:35:18 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/19 22:35:18 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/19 22:35:18 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-267868277
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-267868277
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/19 22:35:18 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/19 22:35:18 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/19 22:35:18 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException.java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/19 22:35:18 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/19 22:35:18 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 15 Number of transactions batched in Syncs: 152 Number of syncs: 348 SyncTimes(ms): 5614 200 
    [junit] 12/03/19 22:35:18 INFO ipc.Server: Stopping server on 53534
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 1 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 0 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 2 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 5 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 3 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 4 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 6 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 7 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 9 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: IPC Server handler 8 on 53534: exiting
    [junit] 12/03/19 22:35:18 INFO ipc.Server: Stopping IPC Server listener on 53534
    [junit] 12/03/19 22:35:18 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/19 22:35:18 INFO ipc.Server: Stopping IPC Server Responder
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 534.962 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/19 22:35:19 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/19 22:35:19 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/19 22:35:19 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/19 22:35:19 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/19 22:35:19 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/19 22:35:19 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/19 22:35:19 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.106 sec
   [delete] Deleting directory /tmp/pig_junit_tmp529830336

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:781: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:836: Tests failed!

Total time: 23 minutes 24 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1209

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1209/changes>

Changes:

[daijy] PIG-2589: Additional e2e test for 0.10 new features

[daijy] PIG-2585: Enable ignored e2e test cases

[daijy] PIG-2563: IndexOutOfBoundsException: while projecting fields from a bag

------------------------------------------
[...truncated 6632 lines...]
 [findbugs]   org.mozilla.javascript.NativeJavaObject
 [findbugs]   jline.ConsoleReaderInputStream
 [findbugs]   org.apache.log4j.PropertyConfigurator
 [findbugs]   org.apache.hadoop.mapred.TaskID
 [findbugs]   org.apache.commons.cli.CommandLine
 [findbugs]   org.python.core.Py
 [findbugs]   org.apache.hadoop.io.BooleanWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable
 [findbugs]   org.antlr.runtime.BitSet
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.Job
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter$CompareOp
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader
 [findbugs]   org.mozilla.javascript.NativeFunction
 [findbugs]   org.apache.hadoop.mapreduce.Counter
 [findbugs]   org.codehaus.jackson.JsonEncoding
 [findbugs]   org.codehaus.jackson.JsonParseException
 [findbugs]   org.python.core.PyCode
 [findbugs]   com.jcraft.jsch.HostKey
 [findbugs]   org.apache.hadoop.hbase.filter.Filter
 [findbugs]   org.apache.commons.logging.Log
 [findbugs]   com.google.common.util.concurrent.ListenableFuture
 [findbugs]   org.apache.hadoop.util.RunJar
 [findbugs]   org.apache.hadoop.mapred.Counters$Group
 [findbugs]   com.jcraft.jsch.ChannelExec
 [findbugs]   org.apache.hadoop.hbase.util.Base64
 [findbugs]   org.antlr.runtime.TokenStream
 [findbugs]   org.apache.hadoop.io.IOUtils
 [findbugs]   com.google.common.util.concurrent.CheckedFuture
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader$Scanner$Entry
 [findbugs]   org.apache.hadoop.fs.FSDataInputStream
 [findbugs]   org.python.core.PyObject
 [findbugs]   jline.History
 [findbugs]   org.apache.hadoop.io.BooleanWritable
 [findbugs]   org.apache.log4j.Logger
 [findbugs]   org.apache.hadoop.hbase.filter.FamilyFilter
 [findbugs]   org.antlr.runtime.IntStream
 [findbugs]   org.apache.hadoop.util.ReflectionUtils
 [findbugs]   org.apache.hadoop.fs.ContentSummary
 [findbugs]   org.python.core.PyTuple
 [findbugs]   org.apache.hadoop.conf.Configuration
 [findbugs]   com.google.common.base.Joiner
 [findbugs]   org.apache.hadoop.mapreduce.lib.input.FileSplit
 [findbugs]   org.apache.hadoop.mapred.Counters$Counter
 [findbugs]   com.jcraft.jsch.Channel
 [findbugs]   org.apache.hadoop.mapred.JobPriority
 [findbugs]   org.apache.commons.cli.Options
 [findbugs]   org.apache.hadoop.mapred.JobID
 [findbugs]   org.apache.hadoop.util.bloom.BloomFilter
 [findbugs]   org.python.core.PyFrame
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter
 [findbugs]   org.apache.hadoop.util.VersionInfo
 [findbugs]   org.python.core.PyString
 [findbugs]   org.apache.hadoop.io.Text$Comparator
 [findbugs]   org.antlr.runtime.MismatchedSetException
 [findbugs]   org.apache.hadoop.io.BytesWritable
 [findbugs]   org.apache.hadoop.fs.FsShell
 [findbugs]   org.mozilla.javascript.ImporterTopLevel
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableOutputFormat
 [findbugs]   org.apache.hadoop.mapred.TaskReport
 [findbugs]   org.antlr.runtime.tree.RewriteRuleSubtreeStream
 [findbugs]   org.apache.commons.cli.HelpFormatter
 [findbugs]   org.mozilla.javascript.NativeObject
 [findbugs]   org.apache.hadoop.hbase.HConstants
 [findbugs]   org.apache.hadoop.io.serializer.Deserializer
 [findbugs]   org.antlr.runtime.FailedPredicateException
 [findbugs]   org.apache.hadoop.io.compress.CompressionCodec
 [findbugs]   org.apache.hadoop.fs.FileStatus
 [findbugs]   org.apache.hadoop.hbase.client.Result
 [findbugs]   org.apache.hadoop.mapreduce.JobContext
 [findbugs]   org.codehaus.jackson.JsonGenerator
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptContext
 [findbugs]   org.apache.hadoop.io.BytesWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable$Comparator
 [findbugs]   org.codehaus.jackson.map.util.LRUMap
 [findbugs]   org.apache.hadoop.hbase.util.Bytes
 [findbugs]   org.antlr.runtime.MismatchedTokenException
 [findbugs]   org.codehaus.jackson.JsonParser
 [findbugs]   com.jcraft.jsch.UserInfo
 [findbugs]   org.python.core.PyException
 [findbugs]   org.apache.commons.cli.ParseException
 [findbugs]   org.apache.hadoop.io.compress.CompressionOutputStream
 [findbugs]   org.apache.hadoop.hbase.filter.WritableByteArrayComparable
 [findbugs]   org.antlr.runtime.tree.CommonTreeNodeStream
 [findbugs]   org.apache.log4j.Level
 [findbugs]   org.apache.hadoop.hbase.client.Scan
 [findbugs]   org.apache.hadoop.mapreduce.Job
 [findbugs]   com.google.common.util.concurrent.Futures
 [findbugs]   org.apache.commons.logging.LogFactory
 [findbugs]   org.apache.commons.codec.binary.Base64
 [findbugs]   org.codehaus.jackson.map.ObjectMapper
 [findbugs]   org.apache.hadoop.fs.FileSystem
 [findbugs]   org.apache.hadoop.hbase.filter.FilterList$Operator
 [findbugs]   org.apache.hadoop.hbase.io.ImmutableBytesWritable
 [findbugs]   org.apache.hadoop.io.serializer.SerializationFactory
 [findbugs]   org.antlr.runtime.tree.TreeAdaptor
 [findbugs]   org.apache.hadoop.mapred.RunningJob
 [findbugs]   org.antlr.runtime.CommonTokenStream
 [findbugs]   org.apache.hadoop.io.DataInputBuffer
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile
 [findbugs]   org.apache.commons.cli.GnuParser
 [findbugs]   org.mozilla.javascript.Context
 [findbugs]   org.apache.hadoop.io.FloatWritable
 [findbugs]   org.antlr.runtime.tree.RewriteEarlyExitException
 [findbugs]   org.apache.hadoop.hbase.HBaseConfiguration
 [findbugs]   org.codehaus.jackson.JsonGenerationException
 [findbugs]   org.apache.hadoop.mapreduce.TaskInputOutputContext
 [findbugs]   org.apache.hadoop.io.compress.GzipCodec
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.JobControl
 [findbugs]   org.antlr.runtime.BaseRecognizer
 [findbugs]   org.apache.hadoop.fs.FileUtil
 [findbugs]   org.apache.hadoop.fs.Path
 [findbugs]   org.apache.hadoop.hbase.client.Put
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Writer
 [findbugs]   jline.ConsoleReader
 [findbugs]   com.google.common.collect.Lists
 [findbugs]   org.apache.hadoop.mapreduce.MapContext
 [findbugs]   org.python.core.PyJavaPackage
 [findbugs]   org.apache.hadoop.hbase.filter.ColumnPrefixFilter
 [findbugs]   org.python.core.PyStringMap
 [findbugs]   org.apache.hadoop.mapreduce.TaskID
 [findbugs]   org.apache.hadoop.hbase.client.HTable
 [findbugs]   org.apache.hadoop.io.FloatWritable$Comparator
 [findbugs]   org.apache.zookeeper.ZooKeeper
 [findbugs]   org.codehaus.jackson.map.JsonMappingException
 [findbugs]   org.python.core.PyFunction
 [findbugs]   org.antlr.runtime.TokenSource
 [findbugs]   com.jcraft.jsch.ChannelDirectTCPIP
 [findbugs]   com.jcraft.jsch.JSchException
 [findbugs]   org.python.util.PythonInterpreter
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
 [findbugs]   org.python.core.PyInteger
 [findbugs]   org.apache.hadoop.mapred.JobConf
 [findbugs]   org.apache.hadoop.util.bloom.Key
 [findbugs]   org.apache.hadoop.io.Text
 [findbugs]   org.antlr.runtime.NoViableAltException
 [findbugs]   org.apache.hadoop.util.GenericOptionsParser
 [findbugs]   org.apache.hadoop.mapreduce.JobID
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptID
 [findbugs]   org.apache.hadoop.filecache.DistributedCache
 [findbugs]   org.apache.hadoop.fs.FSDataOutputStream
 [findbugs]   org.python.core.PyList
 [findbugs]   org.antlr.runtime.tree.TreeNodeStream
 [findbugs]   org.apache.hadoop.hbase.filter.BinaryComparator
 [findbugs]   dk.brics.automaton.RegExp
 [findbugs]   org.mozilla.javascript.Scriptable
 [findbugs]   org.mozilla.javascript.EcmaError
 [findbugs]   org.apache.hadoop.io.serializer.Serializer
 [findbugs]   org.apache.hadoop.util.bloom.Filter
 [findbugs]   org.python.core.PyNone
 [findbugs]   org.mozilla.javascript.Function
 [findbugs]   org.python.core.PySystemState
 [findbugs]   org.antlr.runtime.RecognizerSharedState
 [findbugs]   org.codehaus.jackson.JsonFactory
 [findbugs]   org.antlr.runtime.EarlyExitException
 [findbugs]   org.apache.hadoop.hdfs.DistributedFileSystem
 [findbugs]   org.apache.hadoop.util.LineReader
 [findbugs] Warnings generated: 25
 [findbugs] Missing classes: 233
 [findbugs] Calculating exit code...
 [findbugs] Setting 'missing class' flag (2)
 [findbugs] Setting 'bugs found' flag (1)
 [findbugs] Exit code set to: 3
 [findbugs] Java Result: 3
 [findbugs] Classes needed for analysis were missing
 [findbugs] Output saved to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml>
     [xslt] Processing <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml> to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.html>
     [xslt] Loading stylesheet /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl
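
The exit-code lines above combine independent status flags by bitwise OR: the 'bugs found' flag (1) and the 'missing class' flag (2) OR together to the reported exit code 3. A minimal sketch of that arithmetic, assuming only the flag values shown in this log (this is not FindBugs source code):

    // Illustrative only: flag constants mirror the log output above.
    public class FindbugsExitCodeSketch {
        static final int BUGS_FOUND_FLAG = 1;     // "Setting 'bugs found' flag (1)"
        static final int MISSING_CLASS_FLAG = 2;  // "Setting 'missing class' flag (2)"

        public static void main(String[] args) {
            boolean bugsFound = true;       // 25 warnings were generated
            boolean missingClasses = true;  // 233 classes were missing from the aux classpath
            int exitCode = 0;
            if (missingClasses) exitCode |= MISSING_CLASS_FLAG;
            if (bugsFound)      exitCode |= BUGS_FOUND_FLAG;
            System.out.println("Exit code set to: " + exitCode);  // prints 3, matching the log
        }
    }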

BUILD SUCCESSFUL
Total time: 12 minutes 18 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================




======================================================================
======================================================================
CLEAN: cleaning workspace
======================================================================
======================================================================


Buildfile: build.xml

clean:
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src-gen>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src/docs/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/test/org/apache/pig/test/utils/dotGraph/parser>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig.jar>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig-withouthadoop.jar>

clean:

clean:

BUILD SUCCESSFUL
Total time: 0 seconds


======================================================================
======================================================================
ANALYSIS: ant -Drun.clover=true -Dclover.home=/homes/hudson/tools/clover/latest clover test-commit generate-clover-reports -Dtest.junit.output.format=xml -Dtest.output=yes -Dversion=${BUILD_ID} -Dfindbugs.home=$FINDBUGS_HOME -Djava5.home=$JAVA5_HOME -Dforrest.home=$FORREST_HOME -Dclover.home=$CLOVER_HOME -Declipse.home=$ECLIPSE_HOME
======================================================================
======================================================================


Buildfile: build.xml

clover.setup:
    [mkdir] Created dir: <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/clover/db>
[clover-setup] Clover Version 3.1.0, built on May 31 2011 (build-821)
[clover-setup] Loaded from: /home/jenkins/tools/clover/latest/lib/clover.jar

BUILD FAILED
java.lang.RuntimeException: Clover upgrades for your license ended December 14 2010, and this version of Clover was built May 31 2011. Please visit http://www.atlassian.com/clover/renew for information on upgrading your license.
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:103)
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:25)
	at com.cenqua.clover.tasks.AbstractCloverTask.execute(AbstractCloverTask.java:52)
	at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:288)
	at sun.reflect.GeneratedMethodAccessor1.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
	at org.apache.tools.ant.Task.perform(Task.java:348)
	at org.apache.tools.ant.Target.execute(Target.java:357)
	at org.apache.tools.ant.Target.performTasks(Target.java:385)
	at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1337)
	at org.apache.tools.ant.Project.executeTarget(Project.java:1306)
	at org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
	at org.apache.tools.ant.Project.executeTargets(Project.java:1189)
	at org.apache.tools.ant.Main.runBuild(Main.java:758)
	at org.apache.tools.ant.Main.startAnt(Main.java:217)
	at org.apache.tools.ant.launch.Launcher.run(Launcher.java:257)
	at org.apache.tools.ant.launch.Launcher.main(Launcher.java:104)

Total time: 0 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1208

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1208/changes>

Changes:

[daijy] PIG-2411: AvroStorage UDF in PiggyBank fails to STORE a bag of single-field tuples as Avro arrays

[daijy] PIG-2182: Add more append support to DataByteArray

[daijy] PIG-438: Handle realiasing of existing Alias (A=B;)

[daijy] PIG-2576: Change in behavior for UDFContext.getUDFContext().getJobConf() in front-end

------------------------------------------
[...truncated 49430 lines...]
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/17 10:39:53 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/17 10:39:53 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/17 10:39:53 INFO ipc.Server: Stopping server on 33585
    [junit] 12/03/17 10:39:53 INFO ipc.Server: IPC Server handler 1 on 33585: exiting
    [junit] 12/03/17 10:39:53 INFO ipc.Server: IPC Server handler 2 on 33585: exiting
    [junit] 12/03/17 10:39:53 INFO ipc.Server: IPC Server handler 0 on 33585: exiting
    [junit] 12/03/17 10:39:53 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/17 10:39:53 INFO ipc.Server: Stopping IPC Server listener on 33585
    [junit] 12/03/17 10:39:53 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/17 10:39:53 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/17 10:39:53 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:54364, storageID=DS-1071486913-67.195.138.20-54364-1331980284829, infoPort=41202, ipcPort=33585):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/17 10:39:53 INFO datanode.DataNode: Exiting DataXceiveServer
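
The DataXceiveServer warning above is the usual symptom of the shutdown path closing the listening channel while the acceptor thread is still blocked in accept(); NIO reports that to the blocked thread as AsynchronousCloseException. A minimal sketch of the same interaction, independent of Hadoop:

    // Closing a ServerSocketChannel from another thread while accept() is blocked
    // surfaces as AsynchronousCloseException, as logged by DataXceiverServer above.
    import java.net.InetSocketAddress;
    import java.nio.channels.AsynchronousCloseException;
    import java.nio.channels.ServerSocketChannel;

    public class AcceptCloseSketch {
        public static void main(String[] args) throws Exception {
            final ServerSocketChannel server = ServerSocketChannel.open();
            server.socket().bind(new InetSocketAddress("127.0.0.1", 0));
            Thread acceptor = new Thread(new Runnable() {
                public void run() {
                    try {
                        server.accept();  // blocks, like DataXceiverServer.run()
                    } catch (AsynchronousCloseException e) {
                        System.out.println("accept() aborted by close: " + e);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            });
            acceptor.start();
            Thread.sleep(200);   // give the acceptor time to block in accept()
            server.close();      // the shutdown path closes the channel from another thread
            acceptor.join();
        }
    }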
    [junit] 12/03/17 10:39:53 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Scheduling block blk_-8440186488175917459_1102 file build/test/data/dfs/data/data1/current/blk_-8440186488175917459 for deletion
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Scheduling block blk_-1632205084621063090_1095 file build/test/data/dfs/data/data1/current/blk_-1632205084621063090 for deletion
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Scheduling block blk_4221040630237866643_1101 file build/test/data/dfs/data/data2/current/blk_4221040630237866643 for deletion
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Deleted block blk_-8440186488175917459_1102 at file build/test/data/dfs/data/data1/current/blk_-8440186488175917459
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Deleted block blk_4221040630237866643_1101 at file build/test/data/dfs/data/data2/current/blk_4221040630237866643
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Deleted block blk_-1632205084621063090_1095 at file build/test/data/dfs/data/data1/current/blk_-1632205084621063090
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:54364, storageID=DS-1071486913-67.195.138.20-54364-1331980284829, infoPort=41202, ipcPort=33585):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/17 10:39:54 INFO ipc.Server: Stopping server on 33585
    [junit] 12/03/17 10:39:54 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/17 10:39:54 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/17 10:39:54 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/17 10:39:54 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1855770064
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1855770064
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/17 10:39:54 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 1
    [junit] 12/03/17 10:39:54 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/17 10:39:54 INFO ipc.Server: Stopping server on 48641
    [junit] 12/03/17 10:39:54 INFO ipc.Server: IPC Server handler 0 on 48641: exiting
    [junit] 12/03/17 10:39:54 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/17 10:39:54 INFO ipc.Server: IPC Server handler 2 on 48641: exiting
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/17 10:39:54 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/17 10:39:54 INFO ipc.Server: Stopping IPC Server listener on 48641
    [junit] 12/03/17 10:39:54 INFO ipc.Server: IPC Server handler 1 on 48641: exiting
    [junit] 12/03/17 10:39:54 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:54384, storageID=DS-1222946715-67.195.138.20-54384-1331980284488, infoPort=36253, ipcPort=48641):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/17 10:39:54 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/17 10:39:55 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/17 10:39:55 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/17 10:39:55 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:54384, storageID=DS-1222946715-67.195.138.20-54384-1331980284488, infoPort=36253, ipcPort=48641):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/17 10:39:55 INFO ipc.Server: Stopping server on 48641
    [junit] 12/03/17 10:39:55 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/17 10:39:55 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/17 10:39:55 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/17 10:39:55 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/17 10:39:55 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1866301662
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1866301662
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/17 10:39:55 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 0
    [junit] 12/03/17 10:39:55 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/17 10:39:55 INFO ipc.Server: Stopping server on 36975
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 0 on 36975: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: Stopping IPC Server listener on 36975
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 2 on 36975: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/17 10:39:55 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 1 on 36975: exiting
    [junit] 12/03/17 10:39:55 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:59492, storageID=DS-1222220379-67.195.138.20-59492-1331980284116, infoPort=47499, ipcPort=36975):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/17 10:39:55 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/17 10:39:55 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/17 10:39:55 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/17 10:39:55 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:59492, storageID=DS-1222220379-67.195.138.20-59492-1331980284116, infoPort=47499, ipcPort=36975):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/17 10:39:55 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/17 10:39:55 INFO ipc.Server: Stopping server on 36975
    [junit] 12/03/17 10:39:55 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/17 10:39:55 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/17 10:39:55 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/17 10:39:55 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/17 10:39:55 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId2003364458
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId2003364458
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/17 10:39:55 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/17 10:39:55 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/17 10:39:55 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException.java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/17 10:39:55 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/17 10:39:55 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 12 Number of transactions batched in Syncs: 155 Number of syncs: 348 SyncTimes(ms): 3957 397 
    [junit] 12/03/17 10:39:55 INFO ipc.Server: Stopping server on 33147
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 1 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 0 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 2 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 3 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 4 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 7 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 5 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 6 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 8 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO ipc.Server: IPC Server handler 9 on 33147: exiting
    [junit] 12/03/17 10:39:55 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/17 10:39:55 INFO ipc.Server: Stopping IPC Server listener on 33147
    [junit] 12/03/17 10:39:55 INFO ipc.Server: Stopping IPC Server Responder
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 503.27 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
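
The repeated InstanceNotFoundException warnings in the TestStore teardown above come from MBeans being unregistered a second time after an earlier shutdown already removed them; MBeanServer.unregisterMBean throws when the ObjectName is no longer registered, and Hadoop's MBeans.unregister logs that as a WARN instead of failing. A minimal sketch of the pattern (the MBean and ObjectName below are illustrative stand-ins, not Hadoop's actual FSDatasetState bean):

    import java.lang.management.ManagementFactory;
    import javax.management.InstanceNotFoundException;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class DoubleUnregisterSketch {
        public interface DemoMBean { int getValue(); }
        public static class Demo implements DemoMBean {
            public int getValue() { return 42; }
        }

        public static void main(String[] args) throws Exception {
            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
            ObjectName name = new ObjectName("Hadoop:service=DataNode,name=FSDatasetState-Demo");
            server.registerMBean(new Demo(), name);
            server.unregisterMBean(name);      // first shutdown path removes the bean cleanly
            try {
                server.unregisterMBean(name);  // second shutdown attempt: bean already gone
            } catch (InstanceNotFoundException e) {
                // Hadoop's MBeans.unregister() catches this and logs the WARN seen above
                System.out.println("WARN util.MBeans: " + e.getMessage());
            }
        }
    }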
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/17 10:39:56 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/17 10:39:56 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/17 10:39:56 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/17 10:39:56 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/17 10:39:56 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/17 10:39:56 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/17 10:39:56 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.106 sec
   [delete] Deleting directory /tmp/pig_junit_tmp1093124958
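
The SUBSTRING warnings above end in messages such as "String index out of range: -2" because, on the JDK 6-era runtime used for this build, String.substring(begin, end) reports the negative requested length when end < begin; the UDF logs the warning rather than failing, so the tests still pass. A minimal plain-Java sketch (not the Pig UDF itself) of where that message comes from:

    public class SubstringRangeSketch {
        public static void main(String[] args) {
            String s = "pig";
            try {
                s.substring(2, 0);  // end < begin: requested length is 0 - 2 = -2
            } catch (StringIndexOutOfBoundsException e) {
                System.out.println(e.getMessage());  // "String index out of range: -2"
            }
        }
    }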

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:781: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:836: Tests failed!

Total time: 22 minutes 55 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1207

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1207/changes>

Changes:

[julien] PIG-2573: Automagically setting parallelism based on input file size does not work with HCatalog (traviscrawford via julien)

[daijy] Fix TestPigServer.testRegisterRemoteMacro failure caused by PIG-2565

[thejas] PIG-2590: running ant tar and rpm targets on same copy of pig source results in problems (thejas)

------------------------------------------
[...truncated 6627 lines...]
 [findbugs]   org.mozilla.javascript.NativeJavaObject
 [findbugs]   jline.ConsoleReaderInputStream
 [findbugs]   org.apache.log4j.PropertyConfigurator
 [findbugs]   org.apache.hadoop.mapred.TaskID
 [findbugs]   org.apache.commons.cli.CommandLine
 [findbugs]   org.python.core.Py
 [findbugs]   org.apache.hadoop.io.BooleanWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable
 [findbugs]   org.antlr.runtime.BitSet
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.Job
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter$CompareOp
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader
 [findbugs]   org.mozilla.javascript.NativeFunction
 [findbugs]   org.apache.hadoop.mapreduce.Counter
 [findbugs]   org.codehaus.jackson.JsonEncoding
 [findbugs]   org.codehaus.jackson.JsonParseException
 [findbugs]   org.python.core.PyCode
 [findbugs]   com.jcraft.jsch.HostKey
 [findbugs]   org.apache.hadoop.hbase.filter.Filter
 [findbugs]   org.apache.commons.logging.Log
 [findbugs]   com.google.common.util.concurrent.ListenableFuture
 [findbugs]   org.apache.hadoop.util.RunJar
 [findbugs]   org.apache.hadoop.mapred.Counters$Group
 [findbugs]   com.jcraft.jsch.ChannelExec
 [findbugs]   org.apache.hadoop.hbase.util.Base64
 [findbugs]   org.antlr.runtime.TokenStream
 [findbugs]   org.apache.hadoop.io.IOUtils
 [findbugs]   com.google.common.util.concurrent.CheckedFuture
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader$Scanner$Entry
 [findbugs]   org.apache.hadoop.fs.FSDataInputStream
 [findbugs]   org.python.core.PyObject
 [findbugs]   jline.History
 [findbugs]   org.apache.hadoop.io.BooleanWritable
 [findbugs]   org.apache.log4j.Logger
 [findbugs]   org.apache.hadoop.hbase.filter.FamilyFilter
 [findbugs]   org.antlr.runtime.IntStream
 [findbugs]   org.apache.hadoop.util.ReflectionUtils
 [findbugs]   org.apache.hadoop.fs.ContentSummary
 [findbugs]   org.python.core.PyTuple
 [findbugs]   org.apache.hadoop.conf.Configuration
 [findbugs]   com.google.common.base.Joiner
 [findbugs]   org.apache.hadoop.mapreduce.lib.input.FileSplit
 [findbugs]   org.apache.hadoop.mapred.Counters$Counter
 [findbugs]   com.jcraft.jsch.Channel
 [findbugs]   org.apache.hadoop.mapred.JobPriority
 [findbugs]   org.apache.commons.cli.Options
 [findbugs]   org.apache.hadoop.mapred.JobID
 [findbugs]   org.apache.hadoop.util.bloom.BloomFilter
 [findbugs]   org.python.core.PyFrame
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter
 [findbugs]   org.apache.hadoop.util.VersionInfo
 [findbugs]   org.python.core.PyString
 [findbugs]   org.apache.hadoop.io.Text$Comparator
 [findbugs]   org.antlr.runtime.MismatchedSetException
 [findbugs]   org.apache.hadoop.io.BytesWritable
 [findbugs]   org.apache.hadoop.fs.FsShell
 [findbugs]   org.mozilla.javascript.ImporterTopLevel
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableOutputFormat
 [findbugs]   org.apache.hadoop.mapred.TaskReport
 [findbugs]   org.antlr.runtime.tree.RewriteRuleSubtreeStream
 [findbugs]   org.apache.commons.cli.HelpFormatter
 [findbugs]   org.mozilla.javascript.NativeObject
 [findbugs]   org.apache.hadoop.hbase.HConstants
 [findbugs]   org.apache.hadoop.io.serializer.Deserializer
 [findbugs]   org.antlr.runtime.FailedPredicateException
 [findbugs]   org.apache.hadoop.io.compress.CompressionCodec
 [findbugs]   org.apache.hadoop.fs.FileStatus
 [findbugs]   org.apache.hadoop.hbase.client.Result
 [findbugs]   org.apache.hadoop.mapreduce.JobContext
 [findbugs]   org.codehaus.jackson.JsonGenerator
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptContext
 [findbugs]   org.apache.hadoop.io.BytesWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable$Comparator
 [findbugs]   org.codehaus.jackson.map.util.LRUMap
 [findbugs]   org.apache.hadoop.hbase.util.Bytes
 [findbugs]   org.antlr.runtime.MismatchedTokenException
 [findbugs]   org.codehaus.jackson.JsonParser
 [findbugs]   com.jcraft.jsch.UserInfo
 [findbugs]   org.python.core.PyException
 [findbugs]   org.apache.commons.cli.ParseException
 [findbugs]   org.apache.hadoop.io.compress.CompressionOutputStream
 [findbugs]   org.apache.hadoop.hbase.filter.WritableByteArrayComparable
 [findbugs]   org.antlr.runtime.tree.CommonTreeNodeStream
 [findbugs]   org.apache.log4j.Level
 [findbugs]   org.apache.hadoop.hbase.client.Scan
 [findbugs]   org.apache.hadoop.mapreduce.Job
 [findbugs]   com.google.common.util.concurrent.Futures
 [findbugs]   org.apache.commons.logging.LogFactory
 [findbugs]   org.apache.commons.codec.binary.Base64
 [findbugs]   org.codehaus.jackson.map.ObjectMapper
 [findbugs]   org.apache.hadoop.fs.FileSystem
 [findbugs]   org.apache.hadoop.hbase.filter.FilterList$Operator
 [findbugs]   org.apache.hadoop.hbase.io.ImmutableBytesWritable
 [findbugs]   org.apache.hadoop.io.serializer.SerializationFactory
 [findbugs]   org.antlr.runtime.tree.TreeAdaptor
 [findbugs]   org.apache.hadoop.mapred.RunningJob
 [findbugs]   org.antlr.runtime.CommonTokenStream
 [findbugs]   org.apache.hadoop.io.DataInputBuffer
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile
 [findbugs]   org.apache.commons.cli.GnuParser
 [findbugs]   org.mozilla.javascript.Context
 [findbugs]   org.apache.hadoop.io.FloatWritable
 [findbugs]   org.antlr.runtime.tree.RewriteEarlyExitException
 [findbugs]   org.apache.hadoop.hbase.HBaseConfiguration
 [findbugs]   org.codehaus.jackson.JsonGenerationException
 [findbugs]   org.apache.hadoop.mapreduce.TaskInputOutputContext
 [findbugs]   org.apache.hadoop.io.compress.GzipCodec
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.JobControl
 [findbugs]   org.antlr.runtime.BaseRecognizer
 [findbugs]   org.apache.hadoop.fs.FileUtil
 [findbugs]   org.apache.hadoop.fs.Path
 [findbugs]   org.apache.hadoop.hbase.client.Put
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Writer
 [findbugs]   jline.ConsoleReader
 [findbugs]   com.google.common.collect.Lists
 [findbugs]   org.apache.hadoop.mapreduce.MapContext
 [findbugs]   org.python.core.PyJavaPackage
 [findbugs]   org.apache.hadoop.hbase.filter.ColumnPrefixFilter
 [findbugs]   org.python.core.PyStringMap
 [findbugs]   org.apache.hadoop.mapreduce.TaskID
 [findbugs]   org.apache.hadoop.hbase.client.HTable
 [findbugs]   org.apache.hadoop.io.FloatWritable$Comparator
 [findbugs]   org.apache.zookeeper.ZooKeeper
 [findbugs]   org.codehaus.jackson.map.JsonMappingException
 [findbugs]   org.python.core.PyFunction
 [findbugs]   org.antlr.runtime.TokenSource
 [findbugs]   com.jcraft.jsch.ChannelDirectTCPIP
 [findbugs]   com.jcraft.jsch.JSchException
 [findbugs]   org.python.util.PythonInterpreter
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
 [findbugs]   org.python.core.PyInteger
 [findbugs]   org.apache.hadoop.mapred.JobConf
 [findbugs]   org.apache.hadoop.util.bloom.Key
 [findbugs]   org.apache.hadoop.io.Text
 [findbugs]   org.antlr.runtime.NoViableAltException
 [findbugs]   org.apache.hadoop.util.GenericOptionsParser
 [findbugs]   org.apache.hadoop.mapreduce.JobID
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptID
 [findbugs]   org.apache.hadoop.filecache.DistributedCache
 [findbugs]   org.apache.hadoop.fs.FSDataOutputStream
 [findbugs]   org.python.core.PyList
 [findbugs]   org.antlr.runtime.tree.TreeNodeStream
 [findbugs]   org.apache.hadoop.hbase.filter.BinaryComparator
 [findbugs]   dk.brics.automaton.RegExp
 [findbugs]   org.mozilla.javascript.Scriptable
 [findbugs]   org.mozilla.javascript.EcmaError
 [findbugs]   org.apache.hadoop.io.serializer.Serializer
 [findbugs]   org.apache.hadoop.util.bloom.Filter
 [findbugs]   org.python.core.PyNone
 [findbugs]   org.mozilla.javascript.Function
 [findbugs]   org.python.core.PySystemState
 [findbugs]   org.antlr.runtime.RecognizerSharedState
 [findbugs]   org.codehaus.jackson.JsonFactory
 [findbugs]   org.antlr.runtime.EarlyExitException
 [findbugs]   org.apache.hadoop.hdfs.DistributedFileSystem
 [findbugs]   org.apache.hadoop.util.LineReader
 [findbugs] Warnings generated: 24
 [findbugs] Missing classes: 233
 [findbugs] Calculating exit code...
 [findbugs] Setting 'missing class' flag (2)
 [findbugs] Setting 'bugs found' flag (1)
 [findbugs] Exit code set to: 3
 [findbugs] Java Result: 3
 [findbugs] Classes needed for analysis were missing
 [findbugs] Output saved to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml>
     [xslt] Processing <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml> to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.html>
     [xslt] Loading stylesheet /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl

BUILD SUCCESSFUL
Total time: 12 minutes 26 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================




======================================================================
======================================================================
CLEAN: cleaning workspace
======================================================================
======================================================================


Buildfile: build.xml

clean:
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src-gen>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src/docs/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/test/org/apache/pig/test/utils/dotGraph/parser>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig.jar>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig-withouthadoop.jar>

clean:

clean:

BUILD SUCCESSFUL
Total time: 0 seconds


======================================================================
======================================================================
ANALYSIS: ant -Drun.clover=true -Dclover.home=/homes/hudson/tools/clover/latest clover test-commit generate-clover-reports -Dtest.junit.output.format=xml -Dtest.output=yes -Dversion=${BUILD_ID} -Dfindbugs.home=$FINDBUGS_HOME -Djava5.home=$JAVA5_HOME -Dforrest.home=$FORREST_HOME -Dclover.home=$CLOVER_HOME -Declipse.home=$ECLIPSE_HOME
======================================================================
======================================================================


Buildfile: build.xml

clover.setup:
    [mkdir] Created dir: <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/clover/db>
[clover-setup] Clover Version 3.1.0, built on May 31 2011 (build-821)
[clover-setup] Loaded from: /home/jenkins/tools/clover/latest/lib/clover.jar

BUILD FAILED
java.lang.RuntimeException: Clover upgrades for your license ended December 14 2010, and this version of Clover was built May 31 2011. Please visit http://www.atlassian.com/clover/renew for information on upgrading your license.
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:103)
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:25)
	at com.cenqua.clover.tasks.AbstractCloverTask.execute(AbstractCloverTask.java:52)
	at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:288)
	at sun.reflect.GeneratedMethodAccessor1.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
	at org.apache.tools.ant.Task.perform(Task.java:348)
	at org.apache.tools.ant.Target.execute(Target.java:357)
	at org.apache.tools.ant.Target.performTasks(Target.java:385)
	at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1337)
	at org.apache.tools.ant.Project.executeTarget(Project.java:1306)
	at org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
	at org.apache.tools.ant.Project.executeTargets(Project.java:1189)
	at org.apache.tools.ant.Main.runBuild(Main.java:758)
	at org.apache.tools.ant.Main.startAnt(Main.java:217)
	at org.apache.tools.ant.launch.Launcher.run(Launcher.java:257)
	at org.apache.tools.ant.launch.Launcher.main(Launcher.java:104)

Total time: 1 second
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1206

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1206/changes>

Changes:

[daijy] PIG-2581: HashFNV inconsistent/non-deterministic due to default platform encoding

------------------------------------------
[...truncated 48150 lines...]
    [junit] 12/03/16 11:13:47 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/16 11:13:47 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1423569691
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1423569691
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/16 11:13:47 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/16 11:13:47 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] Shutting down DataNode 2
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping server on 44489
    [junit] 12/03/16 11:13:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 0 on 44489: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 2 on 44489: exiting
    [junit] 12/03/16 11:13:47 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:51773, storageID=DS-1312368450-67.195.138.20-51773-1331895887490, infoPort=43618, ipcPort=44489):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 1 on 44489: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping IPC Server listener on 44489
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/16 11:13:47 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:51773, storageID=DS-1312368450-67.195.138.20-51773-1331895887490, infoPort=43618, ipcPort=44489):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping server on 44489
    [junit] 12/03/16 11:13:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/16 11:13:47 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/16 11:13:47 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/16 11:13:47 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1533527256
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1533527256
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/16 11:13:47 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/16 11:13:47 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] Shutting down DataNode 1
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping server on 32934
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 1 on 32934: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 0 on 32934: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 2 on 32934: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping IPC Server listener on 32934
    [junit] 12/03/16 11:13:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/16 11:13:47 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:42087, storageID=DS-831219094-67.195.138.20-42087-1331895887156, infoPort=40659, ipcPort=32934):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/16 11:13:47 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:42087, storageID=DS-831219094-67.195.138.20-42087-1331895887156, infoPort=40659, ipcPort=32934):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping server on 32934
    [junit] 12/03/16 11:13:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/16 11:13:47 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/16 11:13:47 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/16 11:13:47 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1033205917
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1033205917
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] Shutting down DataNode 0
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/16 11:13:47 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
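The WARN util.MBeans entries in the teardown trace above are logged when org.apache.hadoop.metrics2.util.MBeans.unregister asks the MBean server to remove a name that is not (or no longer) registered, and JMX answers with InstanceNotFoundException. A minimal standalone Java sketch of that JMX failure mode follows; the class name is hypothetical and this is not Hadoop's code, only the same javax.management calls the trace shows.

    import java.lang.management.ManagementFactory;
    import javax.management.InstanceNotFoundException;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class MBeanUnregisterSketch {
        public static void main(String[] args) throws Exception {
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            // Same ObjectName pattern as the WARN util.MBeans entries above.
            ObjectName name = new ObjectName("Hadoop:service=DataNode,name=DataNodeInfo");
            try {
                // Nothing is registered under this name in this sketch, so the
                // unregister call throws, mirroring the warnings in the log.
                mbs.unregisterMBean(name);
            } catch (InstanceNotFoundException e) {
                System.out.println(e);
            }
        }
    }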
    [junit] 12/03/16 11:13:47 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping server on 50934
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 0 on 50934: exiting
    [junit] 12/03/16 11:13:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 1 on 50934: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 2 on 50934: exiting
    [junit] 12/03/16 11:13:47 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:39599, storageID=DS-298741963-67.195.138.20-39599-1331895886773, infoPort=41934, ipcPort=50934):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping IPC Server listener on 50934
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/16 11:13:47 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:39599, storageID=DS-298741963-67.195.138.20-39599-1331895886773, infoPort=41934, ipcPort=50934):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/16 11:13:47 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping server on 50934
    [junit] 12/03/16 11:13:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/16 11:13:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/16 11:13:47 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/16 11:13:47 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/16 11:13:47 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-521219033
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-521219033
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/16 11:13:47 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/16 11:13:47 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/16 11:13:47 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException.java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/16 11:13:47 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/16 11:13:47 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 11Number of transactions batched in Syncs: 160 Number of syncs: 347 SyncTimes(ms): 5652 425 
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping server on 43924
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 4 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 5 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 6 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 3 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 8 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 9 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 2 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 1 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 0 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: IPC Server handler 7 on 43924: exiting
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping IPC Server listener on 43924
    [junit] 12/03/16 11:13:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/16 11:13:47 INFO ipc.Server: Stopping IPC Server Responder
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 534.48 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/16 11:13:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/16 11:13:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/16 11:13:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/16 11:13:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/16 11:13:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/16 11:13:48 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/16 11:13:48 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.108 sec
   [delete] Deleting directory /tmp/pig_junit_tmp2050560082
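The SUBSTRING warnings in the TestStringUDFs run above are the builtin logging out-of-range string operations rather than failing the run (it finishes with 0 failures, presumably by returning null for those inputs); the quoted messages are the JDK's own String.substring range errors. A minimal standalone sketch reproducing one of them, with a hypothetical class name and no Pig UDF code involved:

    public class SubstringRangeSketch {
        public static void main(String[] args) {
            String s = "pig";
            try {
                // End index smaller than begin index: on the JDK of this era the
                // message is "String index out of range: -2", matching the
                // SUBSTRING warnings logged above.
                s.substring(2, 0);
            } catch (StringIndexOutOfBoundsException e) {
                System.out.println(e.getMessage());
            }
        }
    }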

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:780: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:835: Tests failed!

Total time: 22 minutes 56 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1205

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1205/changes>

Changes:

[daijy] PIG-2588: e2e harness: use pig command for cluster deploy

[daijy] Fix unit test TestMacroExpansion

[daijy] Fix e2e test Grunt_6

[daijy] Fix e2e test Jython_Command_1

[daijy] PIG-2565: Support IMPORT for macros stored in S3 Buckets

[daijy] PIG-2570: LimitOptimizer fails with dynamic LIMIT argument

------------------------------------------
[...truncated 47676 lines...]
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/15 00:39:45 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 2
    [junit] 12/03/15 00:39:45 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/15 00:39:45 INFO ipc.Server: Stopping server on 34128
    [junit] 12/03/15 00:39:45 INFO ipc.Server: IPC Server handler 0 on 34128: exiting
    [junit] 12/03/15 00:39:45 INFO ipc.Server: Stopping IPC Server listener on 34128
    [junit] 12/03/15 00:39:45 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/15 00:39:45 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/15 00:39:45 INFO ipc.Server: IPC Server handler 1 on 34128: exiting
    [junit] 12/03/15 00:39:45 INFO ipc.Server: IPC Server handler 2 on 34128: exiting
    [junit] 12/03/15 00:39:45 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/15 00:39:45 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:49226, storageID=DS-523686638-67.195.138.20-49226-1331771462639, infoPort=59883, ipcPort=34128):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/15 00:39:45 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/15 00:39:45 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/15 00:39:45 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:53496 to delete  blk_1495298642073345663_1102 blk_2989927980216895302_1095
    [junit] 12/03/15 00:39:45 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:40675 to delete  blk_-4826409291013281508_1101 blk_1495298642073345663_1102 blk_2989927980216895302_1095
    [junit] 12/03/15 00:39:46 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/15 00:39:46 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:49226, storageID=DS-523686638-67.195.138.20-49226-1331771462639, infoPort=59883, ipcPort=34128):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
    [junit] 12/03/15 00:39:46 INFO ipc.Server: Stopping server on 34128
    [junit] 12/03/15 00:39:46 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/15 00:39:46 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/15 00:39:46 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/15 00:39:46 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/15 00:39:46 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1204645256
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId-1204645256
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/15 00:39:46 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] Shutting down DataNode 1
    [junit] 12/03/15 00:39:46 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/15 00:39:46 INFO ipc.Server: Stopping server on 56453
    [junit] 12/03/15 00:39:46 INFO ipc.Server: IPC Server handler 0 on 56453: exiting
    [junit] 12/03/15 00:39:46 INFO ipc.Server: IPC Server handler 2 on 56453: exiting
    [junit] 12/03/15 00:39:46 INFO ipc.Server: IPC Server handler 1 on 56453: exiting
    [junit] 12/03/15 00:39:46 INFO ipc.Server: Stopping IPC Server listener on 56453
    [junit] 12/03/15 00:39:46 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/15 00:39:46 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/15 00:39:46 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/15 00:39:46 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:46574, storageID=DS-90420611-67.195.138.20-46574-1331771462300, infoPort=39006, ipcPort=56453):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/15 00:39:46 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/15 00:39:46 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Scheduling block blk_1495298642073345663_1102 file build/test/data/dfs/data/data1/current/blk_1495298642073345663 for deletion
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Scheduling block blk_2989927980216895302_1095 file build/test/data/dfs/data/data1/current/blk_2989927980216895302 for deletion
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Deleted block blk_1495298642073345663_1102 at file build/test/data/dfs/data/data1/current/blk_1495298642073345663
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Deleted block blk_2989927980216895302_1095 at file build/test/data/dfs/data/data1/current/blk_2989927980216895302
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:46574, storageID=DS-90420611-67.195.138.20-46574-1331771462300, infoPort=39006, ipcPort=56453):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
    [junit] 12/03/15 00:39:47 INFO ipc.Server: Stopping server on 56453
    [junit] 12/03/15 00:39:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/15 00:39:47 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/15 00:39:47 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/15 00:39:47 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1386308067
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1386308067
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] Shutting down DataNode 0
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/15 00:39:47 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/15 00:39:47 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/15 00:39:47 INFO ipc.Server: Stopping server on 37106
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 0 on 37106: exiting
    [junit] 12/03/15 00:39:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 2 on 37106: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 1 on 37106: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/15 00:39:47 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:53496, storageID=DS-163181747-67.195.138.20-53496-1331771461946, infoPort=33145, ipcPort=37106):DataXceiveServer:java.nio.channels.AsynchronousCloseException
    [junit] 	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
    [junit] 	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
    [junit] 	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:131)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 
    [junit] 12/03/15 00:39:47 INFO ipc.Server: Stopping IPC Server listener on 37106
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Exiting DataXceiveServer
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
    [junit] 12/03/15 00:39:47 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:53496, storageID=DS-163181747-67.195.138.20-53496-1331771461946, infoPort=33145, ipcPort=37106):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
    [junit] 12/03/15 00:39:47 WARN util.MBeans: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=DataNodeInfo
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.unRegisterMXBean(DataNode.java:513)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:726)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1442)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/15 00:39:47 INFO ipc.Server: Stopping server on 37106
    [junit] 12/03/15 00:39:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/15 00:39:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
    [junit] 12/03/15 00:39:47 INFO datanode.FSDatasetAsyncDiskService: Shutting down all async disk service threads...
    [junit] 12/03/15 00:39:47 INFO datanode.FSDatasetAsyncDiskService: All async disk service threads have been shut down.
    [junit] 12/03/15 00:39:47 WARN util.MBeans: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1093865431
    [junit] javax.management.InstanceNotFoundException: Hadoop:service=DataNode,name=FSDatasetState-UndefinedStorageId1093865431
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1094)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:415)
    [junit] 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:403)
    [junit] 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:506)
    [junit] 	at org.apache.hadoop.metrics2.util.MBeans.unregister(MBeans.java:71)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.FSDataset.shutdown(FSDataset.java:1934)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:788)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:566)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:550)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:87)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
    [junit] 	at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
    [junit] 	at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] 12/03/15 00:39:47 WARN datanode.FSDatasetAsyncDiskService: AsyncDiskService has already shut down.
    [junit] 12/03/15 00:39:47 INFO mortbay.log: Stopped SelectChannelConnector@localhost:0
    [junit] 12/03/15 00:39:47 INFO namenode.FSNamesystem: Number of transactions: 502 Total time for transactions(ms): 13Number of transactions batched in Syncs: 159 Number of syncs: 347 SyncTimes(ms): 3388 356 
    [junit] 12/03/15 00:39:47 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException.java.lang.InterruptedException: sleep interrupted
    [junit] 12/03/15 00:39:47 INFO namenode.DecommissionManager: Interrupted Monitor
    [junit] java.lang.InterruptedException: sleep interrupted
    [junit] 	at java.lang.Thread.sleep(Native Method)
    [junit] 	at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
    [junit] 	at java.lang.Thread.run(Thread.java:662)
    [junit] 12/03/15 00:39:47 INFO ipc.Server: Stopping server on 46858
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 2 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 0 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: Stopping IPC Server listener on 46858
    [junit] 12/03/15 00:39:47 INFO metrics.RpcInstrumentation: shut down
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 9 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 8 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 7 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 6 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 5 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 4 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 3 on 46858: exiting
    [junit] 12/03/15 00:39:47 INFO ipc.Server: Stopping IPC Server Responder
    [junit] 12/03/15 00:39:47 INFO ipc.Server: IPC Server handler 1 on 46858: exiting
    [junit] Tests run: 17, Failures: 3, Errors: 3, Time elapsed: 519.101 sec
    [junit] Test org.apache.pig.test.TestStore FAILED
    [junit] Running org.apache.pig.test.TestStringUDFs
    [junit] 12/03/15 00:39:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
    [junit] 12/03/15 00:39:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/15 00:39:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
    [junit] 12/03/15 00:39:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
    [junit] 12/03/15 00:39:48 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
    [junit] 12/03/15 00:39:48 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
    [junit] 12/03/15 00:39:48 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.106 sec
   [delete] Deleting directory /tmp/pig_junit_tmp1380020235

BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:780: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:835: Tests failed!

Total time: 22 minutes 47 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints

Build failed in Jenkins: Pig-trunk #1203

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Pig-trunk/1203/changes>

Changes:

[daijy] PIG-2543: PigStats.isSuccessful returns false if embedded pig script has sh commands

------------------------------------------
[...truncated 6194 lines...]
 [findbugs]   org.mozilla.javascript.NativeJavaObject
 [findbugs]   jline.ConsoleReaderInputStream
 [findbugs]   org.apache.log4j.PropertyConfigurator
 [findbugs]   org.apache.hadoop.mapred.TaskID
 [findbugs]   org.apache.commons.cli.CommandLine
 [findbugs]   org.python.core.Py
 [findbugs]   org.apache.hadoop.io.BooleanWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable
 [findbugs]   org.antlr.runtime.BitSet
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.Job
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter$CompareOp
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader
 [findbugs]   org.mozilla.javascript.NativeFunction
 [findbugs]   org.apache.hadoop.mapreduce.Counter
 [findbugs]   org.codehaus.jackson.JsonEncoding
 [findbugs]   org.codehaus.jackson.JsonParseException
 [findbugs]   org.python.core.PyCode
 [findbugs]   com.jcraft.jsch.HostKey
 [findbugs]   org.apache.hadoop.hbase.filter.Filter
 [findbugs]   org.apache.commons.logging.Log
 [findbugs]   com.google.common.util.concurrent.ListenableFuture
 [findbugs]   org.apache.hadoop.util.RunJar
 [findbugs]   org.apache.hadoop.mapred.Counters$Group
 [findbugs]   com.jcraft.jsch.ChannelExec
 [findbugs]   org.apache.hadoop.hbase.util.Base64
 [findbugs]   org.antlr.runtime.TokenStream
 [findbugs]   org.apache.hadoop.io.IOUtils
 [findbugs]   com.google.common.util.concurrent.CheckedFuture
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Reader$Scanner$Entry
 [findbugs]   org.apache.hadoop.fs.FSDataInputStream
 [findbugs]   org.python.core.PyObject
 [findbugs]   jline.History
 [findbugs]   org.apache.hadoop.io.BooleanWritable
 [findbugs]   org.apache.log4j.Logger
 [findbugs]   org.apache.hadoop.hbase.filter.FamilyFilter
 [findbugs]   org.antlr.runtime.IntStream
 [findbugs]   org.apache.hadoop.util.ReflectionUtils
 [findbugs]   org.apache.hadoop.fs.ContentSummary
 [findbugs]   org.python.core.PyTuple
 [findbugs]   org.apache.hadoop.conf.Configuration
 [findbugs]   com.google.common.base.Joiner
 [findbugs]   org.apache.hadoop.mapreduce.lib.input.FileSplit
 [findbugs]   org.apache.hadoop.mapred.Counters$Counter
 [findbugs]   com.jcraft.jsch.Channel
 [findbugs]   org.apache.hadoop.mapred.JobPriority
 [findbugs]   org.apache.commons.cli.Options
 [findbugs]   org.apache.hadoop.mapred.JobID
 [findbugs]   org.apache.hadoop.util.bloom.BloomFilter
 [findbugs]   org.python.core.PyFrame
 [findbugs]   org.apache.hadoop.hbase.filter.CompareFilter
 [findbugs]   org.apache.hadoop.util.VersionInfo
 [findbugs]   org.python.core.PyString
 [findbugs]   org.apache.hadoop.io.Text$Comparator
 [findbugs]   org.antlr.runtime.MismatchedSetException
 [findbugs]   org.apache.hadoop.io.BytesWritable
 [findbugs]   org.apache.hadoop.fs.FsShell
 [findbugs]   org.mozilla.javascript.ImporterTopLevel
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableOutputFormat
 [findbugs]   org.apache.hadoop.mapred.TaskReport
 [findbugs]   org.antlr.runtime.tree.RewriteRuleSubtreeStream
 [findbugs]   org.apache.commons.cli.HelpFormatter
 [findbugs]   org.mozilla.javascript.NativeObject
 [findbugs]   org.apache.hadoop.hbase.HConstants
 [findbugs]   org.apache.hadoop.io.serializer.Deserializer
 [findbugs]   org.antlr.runtime.FailedPredicateException
 [findbugs]   org.apache.hadoop.io.compress.CompressionCodec
 [findbugs]   org.apache.hadoop.fs.FileStatus
 [findbugs]   org.apache.hadoop.hbase.client.Result
 [findbugs]   org.apache.hadoop.mapreduce.JobContext
 [findbugs]   org.codehaus.jackson.JsonGenerator
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptContext
 [findbugs]   org.apache.hadoop.io.BytesWritable$Comparator
 [findbugs]   org.apache.hadoop.io.LongWritable$Comparator
 [findbugs]   org.codehaus.jackson.map.util.LRUMap
 [findbugs]   org.apache.hadoop.hbase.util.Bytes
 [findbugs]   org.antlr.runtime.MismatchedTokenException
 [findbugs]   org.codehaus.jackson.JsonParser
 [findbugs]   com.jcraft.jsch.UserInfo
 [findbugs]   org.python.core.PyException
 [findbugs]   org.apache.commons.cli.ParseException
 [findbugs]   org.apache.hadoop.io.compress.CompressionOutputStream
 [findbugs]   org.apache.hadoop.hbase.filter.WritableByteArrayComparable
 [findbugs]   org.antlr.runtime.tree.CommonTreeNodeStream
 [findbugs]   org.apache.log4j.Level
 [findbugs]   org.apache.hadoop.hbase.client.Scan
 [findbugs]   org.apache.hadoop.mapreduce.Job
 [findbugs]   com.google.common.util.concurrent.Futures
 [findbugs]   org.apache.commons.logging.LogFactory
 [findbugs]   org.apache.commons.codec.binary.Base64
 [findbugs]   org.codehaus.jackson.map.ObjectMapper
 [findbugs]   org.apache.hadoop.fs.FileSystem
 [findbugs]   org.apache.hadoop.hbase.filter.FilterList$Operator
 [findbugs]   org.apache.hadoop.hbase.io.ImmutableBytesWritable
 [findbugs]   org.apache.hadoop.io.serializer.SerializationFactory
 [findbugs]   org.antlr.runtime.tree.TreeAdaptor
 [findbugs]   org.apache.hadoop.mapred.RunningJob
 [findbugs]   org.antlr.runtime.CommonTokenStream
 [findbugs]   org.apache.hadoop.io.DataInputBuffer
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile
 [findbugs]   org.apache.commons.cli.GnuParser
 [findbugs]   org.mozilla.javascript.Context
 [findbugs]   org.apache.hadoop.io.FloatWritable
 [findbugs]   org.antlr.runtime.tree.RewriteEarlyExitException
 [findbugs]   org.apache.hadoop.hbase.HBaseConfiguration
 [findbugs]   org.codehaus.jackson.JsonGenerationException
 [findbugs]   org.apache.hadoop.mapreduce.TaskInputOutputContext
 [findbugs]   org.apache.hadoop.io.compress.GzipCodec
 [findbugs]   org.apache.hadoop.mapred.jobcontrol.JobControl
 [findbugs]   org.antlr.runtime.BaseRecognizer
 [findbugs]   org.apache.hadoop.fs.FileUtil
 [findbugs]   org.apache.hadoop.fs.Path
 [findbugs]   org.apache.hadoop.hbase.client.Put
 [findbugs]   org.apache.hadoop.io.file.tfile.TFile$Writer
 [findbugs]   jline.ConsoleReader
 [findbugs]   com.google.common.collect.Lists
 [findbugs]   org.apache.hadoop.mapreduce.MapContext
 [findbugs]   org.python.core.PyJavaPackage
 [findbugs]   org.apache.hadoop.hbase.filter.ColumnPrefixFilter
 [findbugs]   org.python.core.PyStringMap
 [findbugs]   org.apache.hadoop.mapreduce.TaskID
 [findbugs]   org.apache.hadoop.hbase.client.HTable
 [findbugs]   org.apache.hadoop.io.FloatWritable$Comparator
 [findbugs]   org.apache.zookeeper.ZooKeeper
 [findbugs]   org.codehaus.jackson.map.JsonMappingException
 [findbugs]   org.python.core.PyFunction
 [findbugs]   org.antlr.runtime.TokenSource
 [findbugs]   com.jcraft.jsch.ChannelDirectTCPIP
 [findbugs]   com.jcraft.jsch.JSchException
 [findbugs]   org.python.util.PythonInterpreter
 [findbugs]   org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
 [findbugs]   org.python.core.PyInteger
 [findbugs]   org.apache.hadoop.mapred.JobConf
 [findbugs]   org.apache.hadoop.util.bloom.Key
 [findbugs]   org.apache.hadoop.io.Text
 [findbugs]   org.antlr.runtime.NoViableAltException
 [findbugs]   org.apache.hadoop.util.GenericOptionsParser
 [findbugs]   org.apache.hadoop.mapreduce.JobID
 [findbugs]   org.apache.hadoop.mapreduce.TaskAttemptID
 [findbugs]   org.apache.hadoop.filecache.DistributedCache
 [findbugs]   org.apache.hadoop.fs.FSDataOutputStream
 [findbugs]   org.python.core.PyList
 [findbugs]   org.antlr.runtime.tree.TreeNodeStream
 [findbugs]   org.apache.hadoop.hbase.filter.BinaryComparator
 [findbugs]   dk.brics.automaton.RegExp
 [findbugs]   org.mozilla.javascript.Scriptable
 [findbugs]   org.mozilla.javascript.EcmaError
 [findbugs]   org.apache.hadoop.io.serializer.Serializer
 [findbugs]   org.apache.hadoop.util.bloom.Filter
 [findbugs]   org.python.core.PyNone
 [findbugs]   org.mozilla.javascript.Function
 [findbugs]   org.python.core.PySystemState
 [findbugs]   org.antlr.runtime.RecognizerSharedState
 [findbugs]   org.codehaus.jackson.JsonFactory
 [findbugs]   org.antlr.runtime.EarlyExitException
 [findbugs]   org.apache.hadoop.hdfs.DistributedFileSystem
 [findbugs]   org.apache.hadoop.util.LineReader
 [findbugs] Warnings generated: 24
 [findbugs] Missing classes: 233
 [findbugs] Calculating exit code...
 [findbugs] Setting 'missing class' flag (2)
 [findbugs] Setting 'bugs found' flag (1)
 [findbugs] Exit code set to: 3
 [findbugs] Java Result: 3
 [findbugs] Classes needed for analysis were missing
 [findbugs] Output saved to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml>
     [xslt] Processing <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.xml> to <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/findbugs/pig-findbugs-report.html>
     [xslt] Loading stylesheet /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl
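The FindBugs exit-code lines above combine two flags, 2 for 'missing class' and 1 for 'bugs found', into the reported value 3. A small sketch of that arithmetic; the constant names are illustrative, not necessarily FindBugs' own:

    public class FindbugsExitCodeSketch {
        public static void main(String[] args) {
            final int MISSING_CLASS_FLAG = 2; // "Setting 'missing class' flag (2)"
            final int BUGS_FOUND_FLAG = 1;    // "Setting 'bugs found' flag (1)"
            int exitCode = MISSING_CLASS_FLAG | BUGS_FOUND_FLAG;
            System.out.println("Exit code set to: " + exitCode); // prints 3, as in the log
        }
    }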

BUILD SUCCESSFUL
Total time: 5 minutes 22 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================




======================================================================
======================================================================
CLEAN: cleaning workspace
======================================================================
======================================================================


Buildfile: build.xml

clean:
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src-gen>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/src/docs/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/build>
   [delete] Deleting directory <https://builds.apache.org/job/Pig-trunk/ws/trunk/test/org/apache/pig/test/utils/dotGraph/parser>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig.jar>
   [delete] Deleting: <https://builds.apache.org/job/Pig-trunk/ws/trunk/pig-withouthadoop.jar>

clean:

clean:

BUILD SUCCESSFUL
Total time: 0 seconds


======================================================================
======================================================================
ANALYSIS: ant -Drun.clover=true -Dclover.home=/homes/hudson/tools/clover/latest clover test-commit generate-clover-reports -Dtest.junit.output.format=xml -Dtest.output=yes -Dversion=${BUILD_ID} -Dfindbugs.home=$FINDBUGS_HOME -Djava5.home=$JAVA5_HOME -Dforrest.home=$FORREST_HOME -Dclover.home=$CLOVER_HOME -Declipse.home=$ECLIPSE_HOME
======================================================================
======================================================================


Buildfile: build.xml

clover.setup:
    [mkdir] Created dir: <https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/clover/db>
[clover-setup] Clover Version 3.1.0, built on May 31 2011 (build-821)
[clover-setup] Loaded from: /home/jenkins/tools/clover/latest/lib/clover.jar

BUILD FAILED
java.lang.RuntimeException: Clover upgrades for your license ended December 14 2010, and this version of Clover was built May 31 2011. Please visit http://www.atlassian.com/clover/renew for information on upgrading your license.
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:103)
	at com.cenqua.clover.CloverStartup.loadLicense(CloverStartup.java:25)
	at com.cenqua.clover.tasks.AbstractCloverTask.execute(AbstractCloverTask.java:52)
	at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:288)
	at sun.reflect.GeneratedMethodAccessor1.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
	at org.apache.tools.ant.Task.perform(Task.java:348)
	at org.apache.tools.ant.Target.execute(Target.java:357)
	at org.apache.tools.ant.Target.performTasks(Target.java:385)
	at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1337)
	at org.apache.tools.ant.Project.executeTarget(Project.java:1306)
	at org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
	at org.apache.tools.ant.Project.executeTargets(Project.java:1189)
	at org.apache.tools.ant.Main.runBuild(Main.java:758)
	at org.apache.tools.ant.Main.startAnt(Main.java:217)
	at org.apache.tools.ant.launch.Launcher.run(Launcher.java:257)
	at org.apache.tools.ant.launch.Launcher.main(Launcher.java:104)

Total time: 0 seconds
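The Clover failure above is a maintenance-window check: the license allowed upgrades only through December 14 2010, while this Clover build is dated May 31 2011, so loadLicense aborts the clover-setup task. A sketch of that comparison; the dates come from the message, but the check itself is an assumption about what loadLicense does, not Clover's actual code:

    import java.util.Calendar;
    import java.util.Date;

    public class CloverLicenseCheckSketch {
        public static void main(String[] args) {
            Date upgradesEnd = date(2010, Calendar.DECEMBER, 14); // from the license message
            Date buildDate   = date(2011, Calendar.MAY, 31);      // Clover build date
            if (buildDate.after(upgradesEnd)) {
                throw new RuntimeException("Clover upgrades for your license ended "
                        + upgradesEnd + ", and this version of Clover was built " + buildDate);
            }
        }

        private static Date date(int year, int month, int day) {
            Calendar c = Calendar.getInstance();
            c.clear();
            c.set(year, month, day);
            return c.getTime();
        }
    }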
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints