Posted to commits@carbondata.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/12/30 09:22:36 UTC

Build failed in Jenkins: carbondata-master-spark-2.2 » Apache CarbonData :: Examples #1411

See <https://builds.apache.org/job/carbondata-master-spark-2.2/org.apache.carbondata$carbondata-examples/1411/display/redirect>

------------------------------------------
[...truncated 547.47 KB...]
2018-12-30 09:21:02 AUDIT audit:93 - {"time":"December 30, 2018 1:21:02 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922878866316736","opStatus":"FAILED","opTime":"1421 ms","table":"default.timeSeriesTable","extraInfo":{"Exception":"java.lang.RuntimeException","Message":"Data Load failed for DataMap. Please check logs for the failure"}}
- TimeSeriesPreAggregateTableExample *** FAILED ***
  java.lang.RuntimeException: Data Load failed for DataMap. Please check logs for the failure
  at org.apache.spark.sql.execution.command.preaaggregate.LoadPostAggregateListener$.onEvent(PreAggregateListeners.scala:554)
  at org.apache.carbondata.events.OperationListenerBus.fireEvent(OperationListenerBus.java:83)
  at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:525)
  at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:591)
  at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:318)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:147)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:144)
  at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand.runWithAudit(package.scala:140)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:144)
  ...
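For context, TimeSeriesPreAggregateTableExample drives a load into a 'timeSeries' datamap, and that datamap load is what the audit entry above reports as FAILED. A minimal sketch of the kind of DDL such an example exercises, assuming an active SparkSession; the datamap name and the column names (mytime, age) are illustrative, since the actual schema is not visible in this log:

    // Hedged sketch; event-time column, granularity and aggregate are assumptions.
    spark.sql(
      """
        | CREATE DATAMAP agg0_hour ON TABLE timeSeriesTable
        | USING 'timeSeries'
        | DMPROPERTIES ('EVENT_TIME' = 'mytime', 'HOUR_GRANULARITY' = '1')
        | AS SELECT mytime, SUM(age) FROM timeSeriesTable GROUP BY mytime
      """.stripMargin)

Once such a datamap exists, every LOAD DATA into timeSeriesTable also loads the datamap table, which is the step that fails here.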
2018-12-30 09:21:03 AUDIT audit:72 - {"time":"December 30, 2018 1:21:03 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5922881037146876","opStatus":"START"}
2018-12-30 09:21:03 AUDIT audit:93 - {"time":"December 30, 2018 1:21:03 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5922881037146876","opStatus":"SUCCESS","opTime":"95 ms","table":"default.persontable","extraInfo":{"bad_record_path":"","streaming":"false","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-30 09:21:03 AUDIT audit:72 - {"time":"December 30, 2018 1:21:03 AM PST","username":"jenkins","opName":"LOAD DATA OVERWRITE","opId":"5922881136077750","opStatus":"START"}
2018-12-30 09:21:04 AUDIT audit:93 - {"time":"December 30, 2018 1:21:04 AM PST","username":"jenkins","opName":"LOAD DATA OVERWRITE","opId":"5922881136077750","opStatus":"SUCCESS","opTime":"859 ms","table":"default.personTable","extraInfo":{"SegmentId":"0","DataSize":"771.33KB","IndexSize":"657.0B"}}
2018-12-30 09:21:04 AUDIT audit:72 - {"time":"December 30, 2018 1:21:04 AM PST","username":"jenkins","opName":"CREATE DATAMAP","opId":"5922882001467974","opStatus":"START"}
2018-12-30 09:21:06 AUDIT audit:93 - {"time":"December 30, 2018 1:21:06 AM PST","username":"jenkins","opName":"CREATE DATAMAP","opId":"5922882001467974","opStatus":"SUCCESS","opTime":"2231 ms","table":"default.persontable","extraInfo":{"provider":"lucene","dmName":"dm","index_columns":"id , name"}}
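The audit entry above records the datamap this example creates (provider "lucene", dmName "dm", index columns "id , name"). A hedged sketch of the corresponding DDL, reconstructed from that extraInfo and assuming an active SparkSession:

    spark.sql(
      """
        | CREATE DATAMAP dm ON TABLE personTable
        | USING 'lucene'
        | DMPROPERTIES ('INDEX_COLUMNS' = 'id, name')
      """.stripMargin)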
2018-12-30 09:21:06 ERROR DiskBlockObjectWriter:91 - Uncaught exception while reverting partial writes to file /tmp/blockmgr-df6fe533-d39e-4bdf-a745-27aeaaf85a6b/09/temp_shuffle_769770bb-deeb-4735-ba15-a6e3ec20ed4b
java.io.FileNotFoundException: /tmp/blockmgr-df6fe533-d39e-4bdf-a745-27aeaaf85a6b/09/temp_shuffle_769770bb-deeb-4735-ba15-a6e3ec20ed4b (No such file or directory)
	at java.io.FileOutputStream.open0(Native Method)
	at java.io.FileOutputStream.open(FileOutputStream.java:270)
	at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
	at org.apache.spark.storage.DiskBlockObjectWriter$$anonfun$revertPartialWritesAndClose$2.apply$mcV$sp(DiskBlockObjectWriter.scala:217)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1346)
	at org.apache.spark.storage.DiskBlockObjectWriter.revertPartialWritesAndClose(DiskBlockObjectWriter.scala:214)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.stop(BypassMergeSortShuffleWriter.java:237)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:102)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
2018-12-30 09:21:06 ERROR BypassMergeSortShuffleWriter:239 - Error while deleting file /tmp/blockmgr-df6fe533-d39e-4bdf-a745-27aeaaf85a6b/09/temp_shuffle_769770bb-deeb-4735-ba15-a6e3ec20ed4b
2018-12-30 09:21:06 ERROR Executor:91 - Exception in task 0.0 in stage 341.0 (TID 2346)
java.io.FileNotFoundException: /tmp/blockmgr-df6fe533-d39e-4bdf-a745-27aeaaf85a6b/09/temp_shuffle_769770bb-deeb-4735-ba15-a6e3ec20ed4b (No such file or directory)
	at java.io.FileOutputStream.open0(Native Method)
	at java.io.FileOutputStream.open(FileOutputStream.java:270)
	at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
	at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:103)
	at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:116)
	at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:237)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
2018-12-30 09:21:06 ERROR TaskSetManager:70 - Task 0 in stage 341.0 failed 1 times; aborting job
- LuceneDataMapExample *** FAILED ***
  org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 341.0 failed 1 times, most recent failure: Lost task 0.0 in stage 341.0 (TID 2346, localhost, executor driver): java.io.FileNotFoundException: /tmp/blockmgr-df6fe533-d39e-4bdf-a745-27aeaaf85a6b/09/temp_shuffle_769770bb-deeb-4735-ba15-a6e3ec20ed4b (No such file or directory)
	at java.io.FileOutputStream.open0(Native Method)
	at java.io.FileOutputStream.open(FileOutputStream.java:270)
	at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
	at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:103)
	at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:116)
	at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:237)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
  at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1517)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1505)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1504)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1504)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
  at scala.Option.foreach(Option.scala:257)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
  ...
  Cause: java.io.FileNotFoundException: /tmp/blockmgr-df6fe533-d39e-4bdf-a745-27aeaaf85a6b/09/temp_shuffle_769770bb-deeb-4735-ba15-a6e3ec20ed4b (No such file or directory)
  at java.io.FileOutputStream.open0(Native Method)
  at java.io.FileOutputStream.open(FileOutputStream.java:270)
  at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
  at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:103)
  at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:116)
  at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:237)
  at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
  at org.apache.spark.scheduler.Task.run(Task.scala:108)
  ...
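The FileNotFoundException here is not a CarbonData or Lucene problem as such: Spark writes shuffle temp files under spark.local.dir (default /tmp), so anything that removes the blockmgr-* directory mid-job (a tmp cleaner, or a concurrent build on the same agent) fails the task exactly like this. A minimal sketch of moving the scratch space off /tmp; the path is hypothetical, not this job's actual configuration:

    import org.apache.spark.sql.SparkSession

    // "/var/lib/jenkins/spark-tmp" is an illustrative agent-local path.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("LuceneDataMapExample")
      .config("spark.local.dir", "/var/lib/jenkins/spark-tmp")
      .getOrCreate()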
2018-12-30 09:21:06 AUDIT audit:72 - {"time":"December 30, 2018 1:21:06 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5922884545965540","opStatus":"START"}
2018-12-30 09:21:06 AUDIT audit:93 - {"time":"December 30, 2018 1:21:06 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5922884545965540","opStatus":"SUCCESS","opTime":"79 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-30 09:21:06 AUDIT audit:72 - {"time":"December 30, 2018 1:21:06 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922884632826902","opStatus":"START"}
2018-12-30 09:21:07 ERROR DataLoadExecutor:55 - Data Load is partially successful for table origin_table
2018-12-30 09:21:07 AUDIT audit:93 - {"time":"December 30, 2018 1:21:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922884632826902","opStatus":"SUCCESS","opTime":"221 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-30 09:21:07 AUDIT audit:72 - {"time":"December 30, 2018 1:21:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922884861244042","opStatus":"START"}
2018-12-30 09:21:07 ERROR DataLoadExecutor:55 - Data Load is partially successful for table origin_table
2018-12-30 09:21:07 AUDIT audit:93 - {"time":"December 30, 2018 1:21:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922884861244042","opStatus":"SUCCESS","opTime":"183 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-30 09:21:07 AUDIT audit:72 - {"time":"December 30, 2018 1:21:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922885051515709","opStatus":"START"}
2018-12-30 09:21:07 ERROR DataLoadExecutor:55 - Data Load is partially successful for table origin_table
2018-12-30 09:21:07 AUDIT audit:93 - {"time":"December 30, 2018 1:21:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922885051515709","opStatus":"SUCCESS","opTime":"235 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-30 09:21:07 AUDIT audit:72 - {"time":"December 30, 2018 1:21:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922885294117599","opStatus":"START"}
2018-12-30 09:21:07 ERROR DataLoadExecutor:55 - Data Load is partially successful for table origin_table
2018-12-30 09:21:07 AUDIT audit:93 - {"time":"December 30, 2018 1:21:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922885294117599","opStatus":"SUCCESS","opTime":"204 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-30 09:21:07 AUDIT audit:72 - {"time":"December 30, 2018 1:21:07 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5922885604613762","opStatus":"START"}
2018-12-30 09:21:08 AUDIT audit:93 - {"time":"December 30, 2018 1:21:08 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5922885604613762","opStatus":"SUCCESS","opTime":"112 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-30 09:21:08 AUDIT audit:72 - {"time":"December 30, 2018 1:21:08 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922885799764114","opStatus":"START"}
2018-12-30 09:21:08 ERROR DataLoadExecutor:55 - Data Load is partially successful for table origin_table
2018-12-30 09:21:08 AUDIT audit:93 - {"time":"December 30, 2018 1:21:08 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922885799764114","opStatus":"SUCCESS","opTime":"178 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-30 09:21:08 AUDIT audit:72 - {"time":"December 30, 2018 1:21:08 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922885983567461","opStatus":"START"}
2018-12-30 09:21:08 ERROR DataLoadExecutor:55 - Data Load is partially successful for table origin_table
2018-12-30 09:21:08 AUDIT audit:93 - {"time":"December 30, 2018 1:21:08 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922885983567461","opStatus":"SUCCESS","opTime":"182 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2018-12-30 09:21:08 AUDIT audit:72 - {"time":"December 30, 2018 1:21:08 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"5922886254781642","opStatus":"START"}
2018-12-30 09:21:08 AUDIT audit:93 - {"time":"December 30, 2018 1:21:08 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"5922886254781642","opStatus":"SUCCESS","opTime":"87 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
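ExternalTableExample passes, and the counts above show what it demonstrates: an external Carbon table reads whatever segments sit at its LOCATION, so external_table reports 40 rows and then 60 after two more loads into origin_table. A hedged sketch of the DDL pattern, with an illustrative path variable:

    // originTablePath is hypothetical; the example points LOCATION at the
    // store path of origin_table, so newly loaded segments become visible.
    val originTablePath = "/path/to/store/default/origin_table"
    spark.sql(
      s"""
        | CREATE EXTERNAL TABLE external_table
        | STORED AS carbondata
        | LOCATION '$originTablePath'
      """.stripMargin)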
2018-12-30 09:21:08 ERROR DataLoadExecutor:55 - Data Load is partially successful for table _tempTable_5922886505072445

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
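CarbonReaderExample dies with an NPE at CarbonReaderExample.java:118 and prints "null"; the root cause is not recoverable from this log. For reference, a hedged sketch of the SDK read loop such an example exercises; the path and projection are illustrative assumptions, and the "_temp" table name echoes the _tempTable_... load above:

    import org.apache.carbondata.sdk.file.CarbonReader

    // Path and projected columns are illustrative assumptions.
    val reader = CarbonReader.builder("./testWriteFiles", "_temp")
      .projection(Array("name", "age"))
      .build[AnyRef]()
    while (reader.hasNext) {
      val row = reader.readNextRow.asInstanceOf[Array[AnyRef]]
      println(row.mkString(", "))
    }
    reader.close()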
2018-12-30 09:21:09 AUDIT audit:72 - {"time":"December 30, 2018 1:21:09 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5922886690925667","opStatus":"START"}
2018-12-30 09:21:09 AUDIT audit:93 - {"time":"December 30, 2018 1:21:09 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5922886690925667","opStatus":"SUCCESS","opTime":"75 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-30 09:21:09 AUDIT audit:72 - {"time":"December 30, 2018 1:21:09 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922886780800211","opStatus":"START"}
2018-12-30 09:21:09 AUDIT audit:93 - {"time":"December 30, 2018 1:21:09 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5922886780800211","opStatus":"SUCCESS","opTime":"183 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+----+-----------+----------+
| ID | NAME      | SALARY   |
+----+-----------+----------+
| 1  | 'liang'   | 200000.0 |
| 2  | 'anubhav' | 20000.0  |
+----+-----------+----------+
****** Total Number Of Rows Fetched ****** 2
OK
+-----------+
| NAME      |
+-----------+
| 'liang'   |
| 'anubhav' |
+-----------+
********** Total Rows Fetched When Querying The Individual Columns ********** 2
OK
+----------+----+-----------+
| SALARY   | ID | NAME      |
+----------+----+-----------+
| 200000.0 | 1  | 'liang'   |
| 20000.0  | 2  | 'anubhav' |
+----------+----+-----------+
********** Total Rows Fetched When Querying The Out Of Order Columns ********** 2
- HiveExample
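The OK lines and the tables above come from HiveExample querying the carbon table through Hive. A hedged sketch of the JDBC access pattern, with an illustrative connection URL and credentials:

    import java.sql.DriverManager

    // URL and credentials are illustrative assumptions.
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "")
    val stmt = conn.createStatement()
    val rs = stmt.executeQuery("SELECT id, name, salary FROM hive_carbon_example")
    while (rs.next()) {
      println(s"${rs.getInt("id")} | ${rs.getString("name")} | ${rs.getDouble("salary")}")
    }
    rs.close(); stmt.close(); conn.close()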
Run completed in 2 minutes, 55 seconds.
Total number of tests run: 20
Suites: completed 2, aborted 0
Tests: succeeded 18, failed 2, canceled 0, ignored 0, pending 0
*** 2 TESTS FAILED ***
[JENKINS] Recording test results

Jenkins build is back to normal : carbondata-master-spark-2.2 » Apache CarbonData :: Examples #1412

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.2/org.apache.carbondata$carbondata-examples/1412/display/redirect>