Posted to commits@carbondata.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/09/16 09:54:10 UTC

Build failed in Jenkins: carbondata-master-spark-2.1 #3739

See <https://builds.apache.org/job/carbondata-master-spark-2.1/3739/display/redirect>

------------------------------------------
[...truncated 28.68 MB...]
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1918)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1931)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1951)
	at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply$mcV$sp(FileFormatWriter.scala:127)
	... 123 more
Caused by: java.io.FileNotFoundException: /tmp/blockmgr-a61f7cc6-1d43-4547-8418-73b85808a07a/15/temp_shuffle_202a8e6b-c77f-4a2f-9865-6bcc959f668a (No such file or directory)
	at java.io.FileOutputStream.open0(Native Method)
	at java.io.FileOutputStream.open(FileOutputStream.java:270)
	at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
	at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
	at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
	at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:99)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
2019-09-16 09:52:59 AUDIT audit:72 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP DATAMAP","opId":"25263813974328030","opStatus":"START"}
2019-09-16 09:52:59 AUDIT audit:93 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP DATAMAP","opId":"25263813974328030","opStatus":"SUCCESS","opTime":"113 ms","table":"partition_mv.partitionone","extraInfo":{"dmName":"dm1"}}
2019-09-16 09:52:59 AUDIT audit:93 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"CREATE DATAMAP","opId":"25263813208749813","opStatus":"FAILED","opTime":"878 ms","table":"partition_mv.partitionone","extraInfo":{"Exception":"org.apache.spark.SparkException","Message":"Job aborted."}}
- test partition at last column *** FAILED ***
  org.apache.spark.SparkException: Job aborted.
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply$mcV$sp(FileFormatWriter.scala:147)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:121)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:121)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:121)
  at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:101)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
  Cause: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 2544.0 failed 1 times, most recent failure: Lost task 0.0 in stage 2544.0 (TID 45181, localhost, executor driver): java.io.FileNotFoundException: /tmp/blockmgr-a61f7cc6-1d43-4547-8418-73b85808a07a/15/temp_shuffle_202a8e6b-c77f-4a2f-9865-6bcc959f668a (No such file or directory)
	at java.io.FileOutputStream.open0(Native Method)
	at java.io.FileOutputStream.open(FileOutputStream.java:270)
	at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
	at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
	at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
	at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:99)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
  at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
  at scala.Option.foreach(Option.scala:257)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
  ...
  Cause: java.io.FileNotFoundException: /tmp/blockmgr-a61f7cc6-1d43-4547-8418-73b85808a07a/15/temp_shuffle_202a8e6b-c77f-4a2f-9865-6bcc959f668a (No such file or directory)
  at java.io.FileOutputStream.open0(Native Method)
  at java.io.FileOutputStream.open(FileOutputStream.java:270)
  at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
  at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
  at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
  at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
  at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
  at org.apache.spark.scheduler.Task.run(Task.scala:99)
  ...
2019-09-16 09:52:59 AUDIT audit:72 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263814093098170","opStatus":"START"}
2019-09-16 09:52:59 AUDIT audit:93 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263814093098170","opStatus":"SUCCESS","opTime":"1 ms","table":"partition_mv.ag_table","extraInfo":{}}
2019-09-16 09:52:59 AUDIT audit:72 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263814094301120","opStatus":"START"}
2019-09-16 09:53:00 AUDIT audit:93 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263814094301120","opStatus":"SUCCESS","opTime":"941 ms","table":"partition_mv.droppartition","extraInfo":{}}
2019-09-16 09:53:00 AUDIT audit:72 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815034712090","opStatus":"START"}
2019-09-16 09:53:00 AUDIT audit:93 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815034712090","opStatus":"SUCCESS","opTime":"486 ms","table":"partition_mv.maintable","extraInfo":{}}
2019-09-16 09:53:00 AUDIT audit:72 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815521304869","opStatus":"START"}
2019-09-16 09:53:00 AUDIT audit:93 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815521304869","opStatus":"SUCCESS","opTime":"110 ms","table":"partition_mv.par","extraInfo":{}}
2019-09-16 09:53:00 AUDIT audit:72 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815631449447","opStatus":"START"}
2019-09-16 09:53:01 AUDIT audit:93 - {"time":"September 16, 2019 2:53:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815631449447","opStatus":"SUCCESS","opTime":"634 ms","table":"partition_mv.partitionallcompaction","extraInfo":{}}
2019-09-16 09:53:01 AUDIT audit:72 - {"time":"September 16, 2019 2:53:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263816266068790","opStatus":"START"}
2019-09-16 09:53:01 AUDIT audit:93 - {"time":"September 16, 2019 2:53:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263816266068790","opStatus":"SUCCESS","opTime":"199 ms","table":"partition_mv.partitionone","extraInfo":{}}
2019-09-16 09:53:01 AUDIT audit:72 - {"time":"September 16, 2019 2:53:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263816465636116","opStatus":"START"}
2019-09-16 09:53:02 AUDIT audit:93 - {"time":"September 16, 2019 2:53:02 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263816465636116","opStatus":"SUCCESS","opTime":"867 ms","table":"partition_mv.partitiontable","extraInfo":{}}
2019-09-16 09:53:02 AUDIT audit:72 - {"time":"September 16, 2019 2:53:02 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263817333019204","opStatus":"START"}
2019-09-16 09:53:02 AUDIT audit:93 - {"time":"September 16, 2019 2:53:02 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263817333019204","opStatus":"SUCCESS","opTime":"1 ms","table":"partition_mv.sensor_1_table","extraInfo":{}}
2019-09-16 09:53:02 AUDIT audit:72 - {"time":"September 16, 2019 2:53:02 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263817334304590","opStatus":"START"}
2019-09-16 09:53:03 AUDIT audit:93 - {"time":"September 16, 2019 2:53:03 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263817334304590","opStatus":"SUCCESS","opTime":"800 ms","table":"partition_mv.updatetime_8","extraInfo":{}}
MVTPCDSTestCase:
- test create datamap with tpcds_1_4_testCases case_1 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_3 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_4 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_5 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_6 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_8 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_11 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_15 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_16 !!! IGNORED !!!
Run completed in 13 minutes, 9 seconds.
Total number of tests run: 194
Suites: completed 19, aborted 0
Tests: succeeded 181, failed 13, canceled 0, ignored 27, pending 0
*** 13 TESTS FAILED ***
[JENKINS] Recording test results
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache CarbonData :: Parent ........................ SUCCESS [ 11.732 s]
[INFO] Apache CarbonData :: Common ........................ SUCCESS [ 15.576 s]
[INFO] Apache CarbonData :: Core .......................... SUCCESS [02:54 min]
[INFO] Apache CarbonData :: Processing .................... SUCCESS [ 32.007 s]
[INFO] Apache CarbonData :: Hadoop ........................ SUCCESS [ 25.418 s]
[INFO] Apache CarbonData :: Hive .......................... SUCCESS [ 32.780 s]
[INFO] Apache CarbonData :: Streaming ..................... SUCCESS [ 27.381 s]
[INFO] Apache CarbonData :: Store SDK ..................... SUCCESS [01:09 min]
[INFO] Apache CarbonData :: Spark Datasource .............. SUCCESS [03:40 min]
[INFO] Apache CarbonData :: Spark Common .................. SUCCESS [01:31 min]
[INFO] Apache CarbonData :: CLI ........................... SUCCESS [05:48 min]
[INFO] Apache CarbonData :: Lucene Index DataMap .......... SUCCESS [ 18.332 s]
[INFO] Apache CarbonData :: Bloom Index DataMap ........... SUCCESS [ 15.749 s]
[INFO] Apache CarbonData :: Spark2 ........................ SUCCESS [23:43 min]
[INFO] Apache CarbonData :: Spark Common Test ............. SUCCESS [  01:23 h]
[INFO] Apache CarbonData :: DataMap Examples .............. SUCCESS [ 15.655 s]
[INFO] Apache CarbonData :: Materialized View Plan ........ SUCCESS [01:38 min]
[INFO] Apache CarbonData :: Materialized View Core ........ FAILURE [14:22 min]
[INFO] Apache CarbonData :: Assembly ...................... SKIPPED
[INFO] Apache CarbonData :: Examples ...................... SKIPPED
[INFO] Apache CarbonData :: presto ........................ SKIPPED
[INFO] Apache CarbonData :: Flink Examples ................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:22 h
[INFO] Finished at: 2019-09-16T09:53:29+00:00
[INFO] Final Memory: 158M/1701M
[INFO] ------------------------------------------------------------------------
Waiting for Jenkins to finish collecting data
[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (test) on project carbondata-mv-core: There are test failures -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (test) on project carbondata-mv-core: There are test failures
	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:212)
	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
	at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:116)
	at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:80)
	at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build(SingleThreadedBuilder.java:51)
	at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:128)
	at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:307)
	at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:193)
	at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:106)
	at org.jvnet.hudson.maven3.launcher.Maven33Launcher.main(Maven33Launcher.java:129)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.codehaus.plexus.classworlds.launcher.Launcher.launchStandard(Launcher.java:330)
	at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:238)
	at jenkins.maven3.agent.Maven33Main.launch(Maven33Main.java:176)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at hudson.maven.Maven3Builder.call(Maven3Builder.java:139)
	at hudson.maven.Maven3Builder.call(Maven3Builder.java:70)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures
	at org.scalatest.tools.maven.TestMojo.execute(TestMojo.java:107)
	at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:134)
	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:207)
	... 31 more
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :carbondata-mv-core
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/processing/pom.xml> to org.apache.carbondata/carbondata-processing/1.6.0-SNAPSHOT/carbondata-processing-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/processing/target/carbondata-processing-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-processing/1.6.0-SNAPSHOT/carbondata-processing-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/examples/pom.xml> to org.apache.carbondata/carbondata-datamap-examples/1.6.0-SNAPSHOT/carbondata-datamap-examples-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/examples/target/carbondata-datamap-examples-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-datamap-examples/1.6.0-SNAPSHOT/carbondata-datamap-examples-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/mv/core/pom.xml> to org.apache.carbondata/carbondata-mv-core/1.6.0-SNAPSHOT/carbondata-mv-core-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/streaming/pom.xml> to org.apache.carbondata/carbondata-streaming/1.6.0-SNAPSHOT/carbondata-streaming-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/streaming/target/carbondata-streaming-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-streaming/1.6.0-SNAPSHOT/carbondata-streaming-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-common/pom.xml> to org.apache.carbondata/carbondata-spark-common/1.6.0-SNAPSHOT/carbondata-spark-common-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-common/target/carbondata-spark-common-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-spark-common/1.6.0-SNAPSHOT/carbondata-spark-common-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/lucene/pom.xml> to org.apache.carbondata/carbondata-lucene/1.6.0-SNAPSHOT/carbondata-lucene-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/lucene/target/carbondata-lucene-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-lucene/1.6.0-SNAPSHOT/carbondata-lucene-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/examples/spark2/pom.xml> to org.apache.carbondata/carbondata-examples/1.6.0-SNAPSHOT/carbondata-examples-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/hive/pom.xml> to org.apache.carbondata/carbondata-hive/1.6.0-SNAPSHOT/carbondata-hive-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/hive/target/carbondata-hive-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-hive/1.6.0-SNAPSHOT/carbondata-hive-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/common/pom.xml> to org.apache.carbondata/carbondata-common/1.6.0-SNAPSHOT/carbondata-common-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/common/target/carbondata-common-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-common/1.6.0-SNAPSHOT/carbondata-common-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/mv/plan/pom.xml> to org.apache.carbondata/carbondata-mv-plan/1.6.0-SNAPSHOT/carbondata-mv-plan-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/mv/plan/target/carbondata-mv-plan-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-mv-plan/1.6.0-SNAPSHOT/carbondata-mv-plan-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark2/pom.xml> to org.apache.carbondata/carbondata-spark2/1.6.0-SNAPSHOT/carbondata-spark2-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark2/target/carbondata-spark2-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-spark2/1.6.0-SNAPSHOT/carbondata-spark2-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/tools/cli/pom.xml> to org.apache.carbondata/carbondata-cli/1.6.0-SNAPSHOT/carbondata-cli-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/tools/cli/target/carbondata-cli-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-cli/1.6.0-SNAPSHOT/carbondata-cli-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/bloom/pom.xml> to org.apache.carbondata/carbondata-bloom/1.6.0-SNAPSHOT/carbondata-bloom-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/bloom/target/carbondata-bloom-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-bloom/1.6.0-SNAPSHOT/carbondata-bloom-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/assembly/pom.xml> to org.apache.carbondata/carbondata-assembly/1.6.0-SNAPSHOT/carbondata-assembly-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/pom.xml> to org.apache.carbondata/carbondata-parent/1.6.0-SNAPSHOT/carbondata-parent-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/examples/flink/pom.xml> to org.apache.carbondata/carbondata-examples-flink/1.6.0-SNAPSHOT/carbondata-examples-flink-1.6.0-SNAPSHOT.pom
[Fast Archiver] No prior successful build to compare, so performing full copy of artifacts
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/core/pom.xml> to org.apache.carbondata/carbondata-core/1.6.0-SNAPSHOT/carbondata-core-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/core/target/carbondata-core-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-core/1.6.0-SNAPSHOT/carbondata-core-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/hadoop/pom.xml> to org.apache.carbondata/carbondata-hadoop/1.6.0-SNAPSHOT/carbondata-hadoop-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/hadoop/target/carbondata-hadoop-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-hadoop/1.6.0-SNAPSHOT/carbondata-hadoop-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-datasource/pom.xml> to org.apache.carbondata/carbondata-spark-datasource/1.6.0-SNAPSHOT/carbondata-spark-datasource-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-datasource/target/carbondata-spark-datasource-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-spark-datasource/1.6.0-SNAPSHOT/carbondata-spark-datasource-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/presto/pom.xml> to org.apache.carbondata/carbondata-presto/1.6.0-SNAPSHOT/carbondata-presto-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-common-test/pom.xml> to org.apache.carbondata/carbondata-spark-common-test/1.6.0-SNAPSHOT/carbondata-spark-common-test-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-common-test/target/carbondata-spark-common-test-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-spark-common-test/1.6.0-SNAPSHOT/carbondata-spark-common-test-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/store/sdk/pom.xml> to org.apache.carbondata/carbondata-store-sdk/1.6.0-SNAPSHOT/carbondata-store-sdk-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/store/sdk/target/carbondata-store-sdk-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-store-sdk/1.6.0-SNAPSHOT/carbondata-store-sdk-1.6.0-SNAPSHOT.jar
Sending e-mails to: commits@carbondata.apache.org
Sending e-mails to: commits@carbondata.apache.org ravi.pesala@gmail.com
Sending e-mails to: commits@carbondata.apache.org
Sending e-mails to: commits@carbondata.apache.org
channel stopped
Not sending mail to unregistered user 441586683@qq.com
Not sending mail to unregistered user carbondatacontributions@gmail.com
Not sending mail to unregistered user jacky.likun@qq.com
Not sending mail to unregistered user kunalkapoor642@gmail.com
Not sending mail to unregistered user vikramahuja8803@gmail.com

Build failed in Jenkins: carbondata-master-spark-2.1 #3740

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/3740/display/redirect?page=changes>

Changes:

[kumarvishal09] [CARBONDATA-3454] optimized index server output for count(*)

------------------------------------------
[...truncated 12.76 MB...]
|robot3|  3|   1.5|
|robot4|  4|   2.0|
|robot5|  5|   2.5|
|robot6|  6|   3.0|
|robot7|  7|   3.5|
|robot8|  8|   4.0|
|robot9|  9|   4.5|
+------+---+------+

- DirectSQLExample
2019-09-16 19:58:05 AUDIT audit:72 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"25300119797800827","opStatus":"START"}
2019-09-16 19:58:05 AUDIT audit:93 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"25300119797800827","opStatus":"SUCCESS","opTime":"85 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-09-16 19:58:05 AUDIT audit:72 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"LOAD DATA","opId":"25300119999200811","opStatus":"START"}
2019-09-16 19:58:05 AUDIT audit:93 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"LOAD DATA","opId":"25300119999200811","opStatus":"SUCCESS","opTime":"217 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"924.0B","IndexSize":"551.0B"}}
2019-09-16 19:58:05 AUDIT audit:72 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"LOAD DATA","opId":"25300120227300976","opStatus":"START"}
2019-09-16 19:58:05 AUDIT audit:93 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"LOAD DATA","opId":"25300120227300976","opStatus":"SUCCESS","opTime":"152 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"1","DataSize":"924.0B","IndexSize":"551.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

2019-09-16 19:58:05 AUDIT audit:72 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"25300120500083510","opStatus":"START"}
2019-09-16 19:58:05 AUDIT audit:93 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"25300120500083510","opStatus":"SUCCESS","opTime":"67 ms","table":"default.test_boundary","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":"","dictionary_include":"c6_timestamp"}}
2019-09-16 19:58:05 AUDIT audit:72 - {"time":"September 16, 2019 12:58:05 PM PDT","username":"jenkins","opName":"LOAD DATA","opId":"25300120572260264","opStatus":"START"}
2019-09-16 19:58:06 ERROR DataLoadExecutor:55 - Data Load is partially success for table test_boundary
2019-09-16 19:58:06 AUDIT audit:93 - {"time":"September 16, 2019 12:58:06 PM PDT","username":"jenkins","opName":"LOAD DATA","opId":"25300120572260264","opStatus":"SUCCESS","opTime":"279 ms","table":"default.test_boundary","extraInfo":{"SegmentId":"0","DataSize":"4.03KB","IndexSize":"1.06KB"}}
OK
******Total Number Of Rows Fetched ****** 0
- HiveExample *** FAILED ***
  java.lang.AssertionError: assertion failed
  at scala.Predef$.assert(Predef.scala:156)
  at org.apache.carbondata.examples.HiveExample$.readFromHive(HiveExample.scala:171)
  at org.apache.carbondata.examplesCI.RunExamples$$anonfun$21.apply$mcV$sp(RunExamples.scala:135)
  at org.apache.carbondata.examplesCI.RunExamples$$anonfun$21.apply(RunExamples.scala:133)
  at org.apache.carbondata.examplesCI.RunExamples$$anonfun$21.apply(RunExamples.scala:133)
  at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
  at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
  at org.scalatest.Transformer.apply(Transformer.scala:22)
  at org.scalatest.Transformer.apply(Transformer.scala:20)
  ...
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 40 seconds.
Total number of tests run: 21
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 1, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
*** 1 TEST FAILED ***
[JENKINS] Recording test results
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache CarbonData :: Parent ........................ SUCCESS [  9.803 s]
[INFO] Apache CarbonData :: Common ........................ SUCCESS [ 14.498 s]
[INFO] Apache CarbonData :: Core .......................... SUCCESS [02:57 min]
[INFO] Apache CarbonData :: Processing .................... SUCCESS [ 33.712 s]
[INFO] Apache CarbonData :: Hadoop ........................ SUCCESS [ 24.903 s]
[INFO] Apache CarbonData :: Hive .......................... SUCCESS [ 29.677 s]
[INFO] Apache CarbonData :: Streaming ..................... SUCCESS [ 25.923 s]
[INFO] Apache CarbonData :: Store SDK ..................... SUCCESS [01:01 min]
[INFO] Apache CarbonData :: Spark Datasource .............. SUCCESS [04:45 min]
[INFO] Apache CarbonData :: Spark Common .................. SUCCESS [01:31 min]
[INFO] Apache CarbonData :: CLI ........................... SUCCESS [06:11 min]
[INFO] Apache CarbonData :: Lucene Index DataMap .......... SUCCESS [ 17.880 s]
[INFO] Apache CarbonData :: Bloom Index DataMap ........... SUCCESS [ 15.755 s]
[INFO] Apache CarbonData :: Spark2 ........................ SUCCESS [24:47 min]
[INFO] Apache CarbonData :: Spark Common Test ............. SUCCESS [  01:21 h]
[INFO] Apache CarbonData :: DataMap Examples .............. SUCCESS [ 14.769 s]
[INFO] Apache CarbonData :: Materialized View Plan ........ SUCCESS [01:45 min]
[INFO] Apache CarbonData :: Materialized View Core ........ SUCCESS [14:46 min]
[INFO] Apache CarbonData :: Assembly ...................... SUCCESS [ 25.398 s]
[INFO] Apache CarbonData :: Examples ...................... FAILURE [03:43 min]
[INFO] Apache CarbonData :: presto ........................ SKIPPED
[INFO] Apache CarbonData :: Flink Examples ................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:27 h
[INFO] Finished at: 2019-09-16T19:58:56+00:00
[INFO] Final Memory: 159M/1760M
[INFO] ------------------------------------------------------------------------
Waiting for Jenkins to finish collecting data
[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (test) on project carbondata-examples: There are test failures -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (test) on project carbondata-examples: There are test failures
	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:212)
	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
	at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:116)
	at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:80)
	at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build(SingleThreadedBuilder.java:51)
	at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:128)
	at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:307)
	at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:193)
	at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:106)
	at org.jvnet.hudson.maven3.launcher.Maven33Launcher.main(Maven33Launcher.java:129)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.codehaus.plexus.classworlds.launcher.Launcher.launchStandard(Launcher.java:330)
	at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:238)
	at jenkins.maven3.agent.Maven33Main.launch(Maven33Main.java:176)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at hudson.maven.Maven3Builder.call(Maven3Builder.java:139)
	at hudson.maven.Maven3Builder.call(Maven3Builder.java:70)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures
	at org.scalatest.tools.maven.TestMojo.execute(TestMojo.java:107)
	at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:134)
	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:207)
	... 31 more
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :carbondata-examples
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/processing/pom.xml> to org.apache.carbondata/carbondata-processing/1.6.0-SNAPSHOT/carbondata-processing-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/processing/target/carbondata-processing-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-processing/1.6.0-SNAPSHOT/carbondata-processing-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/examples/pom.xml> to org.apache.carbondata/carbondata-datamap-examples/1.6.0-SNAPSHOT/carbondata-datamap-examples-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/examples/target/carbondata-datamap-examples-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-datamap-examples/1.6.0-SNAPSHOT/carbondata-datamap-examples-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/mv/core/pom.xml> to org.apache.carbondata/carbondata-mv-core/1.6.0-SNAPSHOT/carbondata-mv-core-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/mv/core/target/carbondata-mv-core-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-mv-core/1.6.0-SNAPSHOT/carbondata-mv-core-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/streaming/pom.xml> to org.apache.carbondata/carbondata-streaming/1.6.0-SNAPSHOT/carbondata-streaming-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/streaming/target/carbondata-streaming-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-streaming/1.6.0-SNAPSHOT/carbondata-streaming-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-common/pom.xml> to org.apache.carbondata/carbondata-spark-common/1.6.0-SNAPSHOT/carbondata-spark-common-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-common/target/carbondata-spark-common-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-spark-common/1.6.0-SNAPSHOT/carbondata-spark-common-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/lucene/pom.xml> to org.apache.carbondata/carbondata-lucene/1.6.0-SNAPSHOT/carbondata-lucene-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/lucene/target/carbondata-lucene-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-lucene/1.6.0-SNAPSHOT/carbondata-lucene-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/examples/spark2/pom.xml> to org.apache.carbondata/carbondata-examples/1.6.0-SNAPSHOT/carbondata-examples-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/hive/pom.xml> to org.apache.carbondata/carbondata-hive/1.6.0-SNAPSHOT/carbondata-hive-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/hive/target/carbondata-hive-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-hive/1.6.0-SNAPSHOT/carbondata-hive-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/common/pom.xml> to org.apache.carbondata/carbondata-common/1.6.0-SNAPSHOT/carbondata-common-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/common/target/carbondata-common-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-common/1.6.0-SNAPSHOT/carbondata-common-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/mv/plan/pom.xml> to org.apache.carbondata/carbondata-mv-plan/1.6.0-SNAPSHOT/carbondata-mv-plan-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/mv/plan/target/carbondata-mv-plan-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-mv-plan/1.6.0-SNAPSHOT/carbondata-mv-plan-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark2/pom.xml> to org.apache.carbondata/carbondata-spark2/1.6.0-SNAPSHOT/carbondata-spark2-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark2/target/carbondata-spark2-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-spark2/1.6.0-SNAPSHOT/carbondata-spark2-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/tools/cli/pom.xml> to org.apache.carbondata/carbondata-cli/1.6.0-SNAPSHOT/carbondata-cli-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/tools/cli/target/carbondata-cli-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-cli/1.6.0-SNAPSHOT/carbondata-cli-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/bloom/pom.xml> to org.apache.carbondata/carbondata-bloom/1.6.0-SNAPSHOT/carbondata-bloom-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/datamap/bloom/target/carbondata-bloom-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-bloom/1.6.0-SNAPSHOT/carbondata-bloom-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/assembly/pom.xml> to org.apache.carbondata/carbondata-assembly/1.6.0-SNAPSHOT/carbondata-assembly-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/pom.xml> to org.apache.carbondata/carbondata-parent/1.6.0-SNAPSHOT/carbondata-parent-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/examples/flink/pom.xml> to org.apache.carbondata/carbondata-examples-flink/1.6.0-SNAPSHOT/carbondata-examples-flink-1.6.0-SNAPSHOT.pom
[Fast Archiver] No prior successful build to compare, so performing full copy of artifacts
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/core/pom.xml> to org.apache.carbondata/carbondata-core/1.6.0-SNAPSHOT/carbondata-core-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/core/target/carbondata-core-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-core/1.6.0-SNAPSHOT/carbondata-core-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/hadoop/pom.xml> to org.apache.carbondata/carbondata-hadoop/1.6.0-SNAPSHOT/carbondata-hadoop-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/hadoop/target/carbondata-hadoop-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-hadoop/1.6.0-SNAPSHOT/carbondata-hadoop-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-datasource/pom.xml> to org.apache.carbondata/carbondata-spark-datasource/1.6.0-SNAPSHOT/carbondata-spark-datasource-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-datasource/target/carbondata-spark-datasource-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-spark-datasource/1.6.0-SNAPSHOT/carbondata-spark-datasource-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/presto/pom.xml> to org.apache.carbondata/carbondata-presto/1.6.0-SNAPSHOT/carbondata-presto-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-common-test/pom.xml> to org.apache.carbondata/carbondata-spark-common-test/1.6.0-SNAPSHOT/carbondata-spark-common-test-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/integration/spark-common-test/target/carbondata-spark-common-test-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-spark-common-test/1.6.0-SNAPSHOT/carbondata-spark-common-test-1.6.0-SNAPSHOT.jar
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/store/sdk/pom.xml> to org.apache.carbondata/carbondata-store-sdk/1.6.0-SNAPSHOT/carbondata-store-sdk-1.6.0-SNAPSHOT.pom
[JENKINS] Archiving <https://builds.apache.org/job/carbondata-master-spark-2.1/ws/store/sdk/target/carbondata-store-sdk-1.6.0-SNAPSHOT.jar> to org.apache.carbondata/carbondata-store-sdk/1.6.0-SNAPSHOT/carbondata-store-sdk-1.6.0-SNAPSHOT.jar
Sending e-mails to: commits@carbondata.apache.org
Sending e-mails to: commits@carbondata.apache.org
Sending e-mails to: commits@carbondata.apache.org kumarvishal1802@gmail.com ravi.pesala@gmail.com
Sending e-mails to: commits@carbondata.apache.org
Sending e-mails to: commits@carbondata.apache.org
channel stopped
Not sending mail to unregistered user 441586683@qq.com
Not sending mail to unregistered user carbondatacontributions@gmail.com
Not sending mail to unregistered user jacky.likun@qq.com
Not sending mail to unregistered user kunalkapoor642@gmail.com
Not sending mail to unregistered user vikramahuja8803@gmail.com